[ 481.855952] env[62974]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62974) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 481.856376] env[62974]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62974) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 481.856473] env[62974]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62974) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 481.856746] env[62974]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 481.951614] env[62974]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62974) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 481.962465] env[62974]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62974) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 482.564132] env[62974]: INFO nova.virt.driver [None req-bf88a807-8afd-4d10-b07e-a1296abf2953 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 482.636386] env[62974]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 482.636542] env[62974]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 482.636643] env[62974]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62974) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 485.739545] env[62974]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-64f70e92-5d1b-4aef-8c6d-893f86d13c82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.756104] env[62974]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62974) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 485.756271] env[62974]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-19f97e11-7b62-4ee9-a278-04eb2dafd11e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.799960] env[62974]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 76c69.
[ 485.800163] env[62974]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.164s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 485.800735] env[62974]: INFO nova.virt.vmwareapi.driver [None req-bf88a807-8afd-4d10-b07e-a1296abf2953 None None] VMware vCenter version: 7.0.3
[ 485.804387] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9862d21d-6c28-43ed-b00e-cb01730eed0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.823725] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cbb3a4-eda9-4741-b310-2235a0b0a163 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.830558] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b5a2a4-31e2-44d3-b2be-516c42ad595f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.837633] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabca175-2ebf-4daf-ae35-2e38f3f84db8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.851233] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36233fd1-e79c-4d2b-8d71-00e80147297b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.857578] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e1cca0-f403-4efa-9a64-221538dd757c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.889122] env[62974]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-5af26079-4b54-44d3-9be3-4e61360d28c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.894854] env[62974]: DEBUG nova.virt.vmwareapi.driver [None req-bf88a807-8afd-4d10-b07e-a1296abf2953 None None] Extension org.openstack.compute already exists. {{(pid=62974) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 485.897630] env[62974]: INFO nova.compute.provider_config [None req-bf88a807-8afd-4d10-b07e-a1296abf2953 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 486.400788] env[62974]: DEBUG nova.context [None req-bf88a807-8afd-4d10-b07e-a1296abf2953 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),1f9dac49-30ac-4792-a653-10ca1e2cc7e0(cell1) {{(pid=62974) load_cells /opt/stack/nova/nova/context.py:464}}
[ 486.402948] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 486.403197] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 486.403909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 486.404352] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Acquiring lock "1f9dac49-30ac-4792-a653-10ca1e2cc7e0" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 486.404543] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Lock "1f9dac49-30ac-4792-a653-10ca1e2cc7e0" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 486.405627] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Lock "1f9dac49-30ac-4792-a653-10ca1e2cc7e0" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 486.426341] env[62974]: INFO dbcounter [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Registered counter for database nova_cell0
[ 486.434428] env[62974]: INFO dbcounter [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Registered counter for database nova_cell1
[ 486.437917] env[62974]: DEBUG oslo_db.sqlalchemy.engines [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62974) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 486.438332] env[62974]: DEBUG oslo_db.sqlalchemy.engines [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62974) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 486.443345] env[62974]: ERROR nova.db.main.api [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 486.443345] env[62974]: result = function(*args, **kwargs)
[ 486.443345] env[62974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 486.443345] env[62974]: return func(*args, **kwargs)
[ 486.443345] env[62974]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 486.443345] env[62974]: result = fn(*args, **kwargs)
[ 486.443345] env[62974]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 486.443345] env[62974]: return f(*args, **kwargs)
[ 486.443345] env[62974]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 486.443345] env[62974]: return db.service_get_minimum_version(context, binaries)
[ 486.443345] env[62974]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 486.443345] env[62974]: _check_db_access()
[ 486.443345] env[62974]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 486.443345] env[62974]: stacktrace = ''.join(traceback.format_stack())
[ 486.443345] env[62974]:
[ 486.444355] env[62974]: ERROR nova.db.main.api [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 486.444355] env[62974]: result = function(*args, **kwargs)
[ 486.444355] env[62974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 486.444355] env[62974]: return func(*args, **kwargs)
[ 486.444355] env[62974]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 486.444355] env[62974]: result = fn(*args, **kwargs)
[ 486.444355] env[62974]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 486.444355] env[62974]: return f(*args, **kwargs)
[ 486.444355] env[62974]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 486.444355] env[62974]: return db.service_get_minimum_version(context, binaries)
[ 486.444355] env[62974]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 486.444355] env[62974]: _check_db_access()
[ 486.444355] env[62974]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 486.444355] env[62974]: stacktrace = ''.join(traceback.format_stack())
[ 486.444355] env[62974]:
[ 486.444847] env[62974]: WARNING nova.objects.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Failed to get minimum service version for cell 1f9dac49-30ac-4792-a653-10ca1e2cc7e0
[ 486.445048] env[62974]: WARNING nova.objects.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 486.445474] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Acquiring lock "singleton_lock" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 486.445641] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Acquired lock "singleton_lock" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
486.445881] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Releasing lock "singleton_lock" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 486.446223] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Full set of CONF: {{(pid=62974) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 486.446370] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ******************************************************************************** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 486.446496] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] Configuration options gathered from: {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 486.446631] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 486.446824] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 486.446951] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ================================================================================ {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 486.447177] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] allow_resize_to_same_host = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.447347] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] arq_binding_timeout = 300 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.447478] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] backdoor_port = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.447603] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] backdoor_socket = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.447767] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] block_device_allocate_retries = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.447925] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] block_device_allocate_retries_interval = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.448105] env[62974]: DEBUG 
oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cert = self.pem {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.448272] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.448436] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute_monitors = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.448602] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] config_dir = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.448771] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] config_drive_format = iso9660 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.448904] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.449078] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] config_source = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.449247] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] console_host = devstack {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.449409] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] control_exchange = nova {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.449566] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cpu_allocation_ratio = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.449896] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] daemon = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.450112] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] debug = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.450278] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] default_access_ip_network_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.450442] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] default_availability_zone = nova {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.450597] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] default_ephemeral_format = 
None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.450758] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] default_green_pool_size = 1000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.451008] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.451181] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] default_schedule_zone = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.451340] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] disk_allocation_ratio = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.451500] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] enable_new_services = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.451677] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] enabled_apis = ['osapi_compute'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.451843] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] enabled_ssl_apis = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.452009] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] flat_injected = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.452179] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] force_config_drive = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.452339] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] force_raw_images = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.452508] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] graceful_shutdown_timeout = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.452726] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] heal_instance_info_cache_interval = 60 {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.453081] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] host = cpu-1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.453277] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.453447] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] initial_disk_allocation_ratio = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.453610] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] initial_ram_allocation_ratio = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.453839] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.454022] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instance_build_timeout = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.454189] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instance_delete_interval = 300 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.454356] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instance_format = [instance: %(uuid)s] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.454523] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instance_name_template = instance-%08x {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.454717] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instance_usage_audit = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.454917] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instance_usage_audit_period = month {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.455132] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.455284] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] instances_path = /opt/stack/data/nova/instances {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.455449] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] internal_service_availability_zone = internal {{(pid=62974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.455627] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] key = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.455806] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] live_migration_retry_count = 30 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.455978] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_color = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.456159] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_config_append = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.456326] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.456485] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_dir = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.456642] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_file = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.456772] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_options = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.456934] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_rotate_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.457118] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_rotate_interval_type = days {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.457290] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] log_rotation_type = none {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.457420] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.457545] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.457714] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.457876] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.458012] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.458180] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] long_rpc_timeout = 1800 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.458343] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] max_concurrent_builds = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.458502] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] max_concurrent_live_migrations = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.458662] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] max_concurrent_snapshots = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.458826] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] max_local_block_devices = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.458985] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] max_logfile_count = 30 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.459159] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] max_logfile_size_mb = 200 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.459320] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] maximum_instance_delete_attempts = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.459486] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] metadata_listen = 0.0.0.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.459653] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] metadata_listen_port = 8775 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.459821] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] metadata_workers = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.459980] env[62974]: DEBUG oslo_service.service 
[None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] migrate_max_retries = -1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.460158] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] mkisofs_cmd = genisoimage {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.460360] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] my_block_storage_ip = 10.180.1.21 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.460493] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] my_ip = 10.180.1.21 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.460692] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.460856] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] network_allocate_retries = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.461040] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.461210] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] osapi_compute_listen = 0.0.0.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.461369] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] osapi_compute_listen_port = 8774 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.461534] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] osapi_compute_unique_server_name_scope = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.461702] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] osapi_compute_workers = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.461864] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] password_length = 12 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.462031] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] periodic_enable = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.462194] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] periodic_fuzzy_delay = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.462362] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] pointer_model = usbtablet 
{{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.462527] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] preallocate_images = none {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.462712] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] publish_errors = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.462850] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] pybasedir = /opt/stack/nova {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.463026] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ram_allocation_ratio = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.463197] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] rate_limit_burst = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.463366] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] rate_limit_except_level = CRITICAL {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.463527] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] rate_limit_interval = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.463684] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] reboot_timeout = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.463845] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] reclaim_instance_interval = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.464006] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] record = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.464187] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] reimage_timeout_per_gb = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.464353] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] report_interval = 120 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.464515] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] rescue_timeout = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.464715] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] reserved_host_cpus = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.464889] env[62974]: DEBUG oslo_service.service [None 
req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] reserved_host_disk_mb = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.465061] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] reserved_host_memory_mb = 512 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.465224] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] reserved_huge_pages = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.465383] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] resize_confirm_window = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.465541] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] resize_fs_using_block_device = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.465695] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] resume_guests_state_on_host_boot = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.465876] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.466049] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] rpc_response_timeout = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.466212] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] run_external_periodic_tasks = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.466379] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] running_deleted_instance_action = reap {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.466540] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] running_deleted_instance_poll_interval = 1800 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.466699] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] running_deleted_instance_timeout = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.466860] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler_instance_sync_interval = 120 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.467036] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_down_time = 720 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.467208] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
servicegroup_driver = db {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.467365] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] shell_completion = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.467522] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] shelved_offload_time = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.467677] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] shelved_poll_interval = 3600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.467844] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] shutdown_timeout = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.467999] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] source_is_ipv6 = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.468170] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ssl_only = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.468419] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.468591] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] sync_power_state_interval = 600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.468756] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] sync_power_state_pool_size = 1000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.468925] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] syslog_log_facility = LOG_USER {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.469093] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] tempdir = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.469256] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] timeout_nbd = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.469422] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] transport_url = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.469582] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] update_resources_interval = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.469739] env[62974]: DEBUG 
oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] use_cow_images = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.469894] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] use_journal = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.470060] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] use_json = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.470220] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] use_rootwrap_daemon = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.470373] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] use_stderr = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.470526] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] use_syslog = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.470678] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vcpu_pin_set = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.470843] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plugging_is_fatal = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.471014] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plugging_timeout = 300 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.471186] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] virt_mkfs = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.471347] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] volume_usage_poll_interval = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.471504] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] watch_log_file = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.471668] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] web = /usr/share/spice-html5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 486.471851] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.472033] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.472203] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.472370] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_concurrency.disable_process_locking = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.473026] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.473234] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.473411] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.473590] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.473765] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.473934] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.474136] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.auth_strategy = keystone {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.474302] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.compute_link_prefix = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.474476] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.474682] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.dhcp_domain = novalocal {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.474857] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.enable_instance_password = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.475033] 
env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.glance_link_prefix = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.475207] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.475382] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.475545] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.instance_list_per_project_cells = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.475707] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.list_records_by_skipping_down_cells = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.475871] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.local_metadata_per_cell = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.476050] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.max_limit = 1000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.476221] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.metadata_cache_expiration = 15 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.476393] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.neutron_default_tenant_id = default {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.476562] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.response_validation = warn {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.476763] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.use_neutron_default_nets = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.476952] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.477127] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.477297] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.477474] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.477645] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.vendordata_dynamic_targets = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.477810] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.vendordata_jsonfile_path = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.477988] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.478201] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.backend = dogpile.cache.memcached {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.478370] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.backend_argument = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.478531] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.backend_expiration_time = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.478702] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.config_prefix = cache.oslo {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.478874] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.dead_timeout = 60.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.479049] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.debug_cache_backend = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.479216] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.enable_retry_client = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.479376] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.enable_socket_keepalive = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.479545] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.enabled = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.479708] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.enforce_fips_mode = False {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.479877] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.expiration_time = 600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.480050] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.hashclient_retry_attempts = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.480221] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.hashclient_retry_delay = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.480386] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_dead_retry = 300 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.480544] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_password = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.480705] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.480866] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.481038] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_pool_maxsize = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.481204] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.481364] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_sasl_enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.481538] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.481704] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_socket_timeout = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.481868] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.memcache_username = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.482039] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.proxies = [] {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.482206] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.redis_db = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.482363] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.redis_password = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.482532] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.redis_sentinel_service_name = mymaster {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.482730] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.482905] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.redis_server = localhost:6379 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.483083] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.redis_socket_timeout = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.483245] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.redis_username = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.483406] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.retry_attempts = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.483568] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.retry_delay = 0.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.483731] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.socket_keepalive_count = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.483890] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.socket_keepalive_idle = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.484062] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.socket_keepalive_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.484224] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.tls_allowed_ciphers = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.484378] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.tls_cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.484531] 
env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.tls_certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.484720] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.tls_enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.484887] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cache.tls_keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.485070] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.485248] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.auth_type = password {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.485411] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.485604] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.catalog_info = volumev3::publicURL {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.485785] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.485952] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.486127] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.cross_az_attach = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.486291] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.debug = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.486448] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.endpoint_template = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.486612] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.http_retries = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.486775] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.486933] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.keyfile = None {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.487172] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.os_region_name = RegionOne {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.487351] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.487511] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cinder.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.487686] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.487849] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.cpu_dedicated_set = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.488017] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.cpu_shared_set = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.488189] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.image_type_exclude_list = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.488351] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.488512] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.max_concurrent_disk_ops = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.488681] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.max_disk_devices_to_attach = -1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.488871] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.489057] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.489226] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.resource_provider_association_refresh = 300 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.489388] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.489550] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.shutdown_retry_interval = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.489730] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.489911] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] conductor.workers = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.490100] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] console.allowed_origins = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.490263] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] console.ssl_ciphers = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.490433] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] console.ssl_minimum_version = default {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.490600] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] consoleauth.enforce_session_timeout = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.490768] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] consoleauth.token_ttl = 600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.490938] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.491109] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.491272] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.491432] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.connect_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.491589] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.connect_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.491745] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.endpoint_override = None 
{{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.491904] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.492070] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.492231] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.492388] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.492543] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.492725] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.492885] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.493068] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.service_type = accelerator {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.493233] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.493389] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.493546] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.status_code_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.493703] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.493880] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.494049] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] cyborg.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
486.494221] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.asyncio_connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.494380] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.asyncio_slave_connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.494550] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.backend = sqlalchemy {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.494756] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.494920] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.connection_debug = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.495107] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.connection_parameters = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.495277] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.connection_recycle_time = 3600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.495441] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.connection_trace = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.495620] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.db_inc_retry_interval = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.495802] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.db_max_retries = 20 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.495967] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.db_max_retry_interval = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.496149] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.db_retry_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.496316] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.max_overflow = 50 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.496481] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.max_pool_size = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.496643] env[62974]: DEBUG oslo_service.service [None 
req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.max_retries = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.496816] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.496974] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.mysql_wsrep_sync_wait = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.497167] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.pool_timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.497349] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.retry_interval = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.497508] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.slave_connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.497668] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.sqlite_synchronous = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.497830] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] database.use_db_reconnect = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.497998] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.asyncio_connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.498174] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.asyncio_slave_connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.498345] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.backend = sqlalchemy {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.498514] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.498678] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.connection_debug = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.498848] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.connection_parameters = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.499020] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None 
None] api_database.connection_recycle_time = 3600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.499187] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.connection_trace = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.499347] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.db_inc_retry_interval = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.499511] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.db_max_retries = 20 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.499676] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.db_max_retry_interval = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.499841] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.db_retry_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.500010] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.max_overflow = 50 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.500181] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.max_pool_size = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.500343] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.max_retries = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.500511] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.500669] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.500828] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.pool_timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.500986] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.retry_interval = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.501282] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] api_database.slave_connection = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.501340] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
api_database.sqlite_synchronous = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.501480] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] devices.enabled_mdev_types = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.501658] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.501830] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ephemeral_storage_encryption.default_format = luks {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.501994] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ephemeral_storage_encryption.enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.502173] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.502345] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.api_servers = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.502506] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.502665] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.502828] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.502984] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.connect_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.503154] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.connect_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.503313] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.debug = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.503477] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.default_trusted_certificate_ids = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.503639] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.enable_certificate_validation 
= False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.503803] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.enable_rbd_download = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.503962] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.endpoint_override = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.504142] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.504304] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.504462] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.504653] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.504827] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.num_retries = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.504999] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.rbd_ceph_conf = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.505179] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.rbd_connect_timeout = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.505350] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.rbd_pool = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.505518] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.rbd_user = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.505719] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.505889] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.506061] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.506236] env[62974]: DEBUG 
oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.service_type = image {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.506400] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.506557] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.506715] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.status_code_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.506874] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.507064] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.507248] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.verify_glance_signatures = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.507426] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] glance.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.507596] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] guestfs.debug = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.507768] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.507929] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.auth_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.508100] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.508259] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.508422] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.508581] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.connect_retries = None 
{{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.508740] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.connect_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.508899] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.endpoint_override = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.509072] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.509234] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.509391] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.509548] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.509705] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.509864] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.510029] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.510203] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.service_type = shared-file-system {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.510369] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.share_apply_policy_timeout = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.510536] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.510690] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.510847] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.status_code_retry_delay = None {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.511012] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.511198] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.511357] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] manila.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.511560] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] mks.enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.511875] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.512081] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] image_cache.manager_interval = 2400 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.512255] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] image_cache.precache_concurrency = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.512428] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] image_cache.remove_unused_base_images = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.512627] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.512805] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.512986] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] image_cache.subdirectory_name = _base {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.513178] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.api_max_retries = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.513346] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.api_retry_interval = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.513509] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.auth_section = None {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.513671] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.auth_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.513834] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.513995] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.514173] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.514339] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.conductor_group = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.514501] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.connect_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.514696] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.connect_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.514859] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.endpoint_override = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.515035] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.515201] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.515362] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.515519] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.515714] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.peer_list = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.515883] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.516054] env[62974]: DEBUG oslo_service.service [None 
req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.516224] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.serial_console_state_timeout = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.516384] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.516554] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.service_type = baremetal {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.516713] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.shard = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.516877] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.517045] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.517207] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.status_code_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.517381] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.517578] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.517744] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ironic.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.517924] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.518109] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] key_manager.fixed_key = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.518294] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.518456] env[62974]: DEBUG oslo_service.service [None 
req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.barbican_api_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.518616] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.barbican_endpoint = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.518787] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.barbican_endpoint_type = public {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.518947] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.barbican_region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.519121] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.519281] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.519442] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.519602] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.519761] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.519922] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.number_of_retries = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.520093] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.retry_delay = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.520258] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.send_service_user_token = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.520418] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.520575] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.520735] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.verify_ssl = True {{(pid=62974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.520892] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican.verify_ssl_path = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.521067] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.521234] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.auth_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.521392] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.521549] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.521711] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.521871] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.522039] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.522206] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.522363] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] barbican_service_user.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.522548] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.approle_role_id = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.522719] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.approle_secret_id = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.522891] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.kv_mountpoint = secret {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.523064] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.kv_path = None {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.523233] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.kv_version = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.523394] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.namespace = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.523553] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.root_token_id = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.523712] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.ssl_ca_crt_file = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.523881] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.timeout = 60.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.524055] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.use_ssl = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.524236] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.524405] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.524589] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.524762] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.524925] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.connect_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.525094] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.connect_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.525257] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.endpoint_override = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.525417] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.525585] env[62974]: DEBUG oslo_service.service [None 
req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.525761] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.525919] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.526088] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.526253] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.526416] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.526585] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.service_type = identity {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.526748] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.526907] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.527075] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.status_code_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.527234] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.527425] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.527600] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] keystone.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.527790] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.ceph_mount_options = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.528112] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.ceph_mount_point_base 
= /opt/stack/data/n-cpu-1/mnt {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.528297] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.connection_uri = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.528462] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.cpu_mode = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.528630] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.cpu_model_extra_flags = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.528799] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.cpu_models = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.528969] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.cpu_power_governor_high = performance {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.529153] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.cpu_power_governor_low = powersave {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.529317] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.cpu_power_management = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.529488] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.529658] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.device_detach_attempts = 8 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.529826] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.device_detach_timeout = 20 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.529990] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.disk_cachemodes = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.530164] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.disk_prefix = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.530328] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.enabled_perf_events = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.530489] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.file_backed_memory = 0 {{(pid=62974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.530653] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.gid_maps = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.530814] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.hw_disk_discard = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.530972] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.hw_machine_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.531156] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.images_rbd_ceph_conf = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.531320] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.531481] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.531646] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.images_rbd_glance_store_name = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.531815] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.images_rbd_pool = rbd {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.531984] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.images_type = default {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.532158] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.images_volume_group = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.532322] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.inject_key = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.532507] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.inject_partition = -2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.532687] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.inject_password = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.532853] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.iscsi_iface = None {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.533021] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.iser_use_multipath = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.533188] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_bandwidth = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.533350] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.533510] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_downtime = 500 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.533671] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.533831] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.533988] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_inbound_addr = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.534162] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.534321] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_permit_post_copy = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.534478] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_scheme = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.534686] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_timeout_action = abort {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.534857] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_tunnelled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.535028] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.live_migration_uri = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.535194] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.535354] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.max_queues = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.535518] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.535794] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.535959] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.nfs_mount_options = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.536270] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.536448] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.536614] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.num_iser_scan_tries = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.536776] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.num_memory_encrypted_guests = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.536941] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.537116] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.num_pcie_ports = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.537285] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.num_volume_scan_tries = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.537463] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.pmem_namespaces = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.537639] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.quobyte_client_cfg = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.537937] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.538130] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rbd_connect_timeout = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.538297] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.538460] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.538620] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rbd_secret_uuid = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.538778] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rbd_user = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.538939] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.539125] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.remote_filesystem_transport = ssh {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.539287] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rescue_image_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.539442] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rescue_kernel_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.539597] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rescue_ramdisk_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.539765] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.539924] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.rx_queue_size = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.540103] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.smbfs_mount_options = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.540399] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.540576] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.snapshot_compression = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.540739] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.snapshot_image_format = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.540974] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.541157] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.sparse_logical_volumes = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.541321] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.swtpm_enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.541490] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.swtpm_group = tss {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.541657] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.swtpm_user = tss {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.541844] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.sysinfo_serial = unique {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.541983] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.tb_cache_size = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.542154] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.tx_queue_size = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.542317] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.uid_maps = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.542477] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.use_virtio_for_bridges = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.542645] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.virt_type = kvm {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.542812] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.volume_clear = zero 
{{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.542975] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.volume_clear_size = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.543153] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.volume_use_multipath = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.543313] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.vzstorage_cache_path = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.543479] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.543646] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.vzstorage_mount_group = qemu {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.543809] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.vzstorage_mount_opts = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.543973] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.544279] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.544461] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.vzstorage_mount_user = stack {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.544658] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.544845] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.545037] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.auth_type = password {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.545208] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.545372] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.certfile = None 
{{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.545536] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.545734] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.connect_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.545897] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.connect_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.546082] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.default_floating_pool = public {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.546245] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.endpoint_override = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.546411] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.extension_sync_interval = 600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.546574] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.http_retries = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.546736] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.546902] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.547076] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.547256] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.547417] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.547603] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.ovs_bridge = br-int {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.547787] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.physnets = [] {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.547960] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.region_name = RegionOne {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.548135] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.548305] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.service_metadata_proxy = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.548467] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.548634] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.service_type = network {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.548797] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.548955] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.549126] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.status_code_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.549287] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.549468] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.549627] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] neutron.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.549799] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] notifications.bdms_in_notifications = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.549975] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] notifications.default_level = INFO {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.550151] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] notifications.include_share_mapping = False {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.550327] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] notifications.notification_format = unversioned {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.550491] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] notifications.notify_on_state_change = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.550688] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.550851] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] pci.alias = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.551030] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] pci.device_spec = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.551200] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] pci.report_in_placement = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.551373] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.551553] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.auth_type = password {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.551715] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.551877] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.552050] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.552217] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.552376] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.connect_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.552533] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.connect_retry_delay = None {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.552688] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.default_domain_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.552845] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.default_domain_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.553008] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.domain_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.553171] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.domain_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.553331] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.endpoint_override = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.553492] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.553650] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.553809] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.553967] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.554149] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.password = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.554314] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.project_domain_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.554480] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.project_domain_name = Default {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.554679] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.project_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.554863] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.project_name = service {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.555048] 
env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.region_name = RegionOne {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.555216] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.555378] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.555553] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.service_type = placement {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.555745] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.555914] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.556090] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.status_code_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.556256] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.system_scope = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.556417] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.556576] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.trust_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.556738] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.user_domain_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.556907] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.user_domain_name = Default {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.557078] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.user_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.557256] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.username = nova {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.557437] env[62974]: DEBUG oslo_service.service [None 
req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.557599] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] placement.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.557792] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.cores = 20 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.557974] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.count_usage_from_placement = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.558163] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.558334] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.injected_file_content_bytes = 10240 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.558502] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.injected_file_path_length = 255 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.558668] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.injected_files = 5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.558836] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.instances = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.559007] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.key_pairs = 100 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.559183] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.metadata_items = 128 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.559349] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.ram = 51200 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.559512] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.recheck_quota = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.559679] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.server_group_members = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.559847] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.server_groups = 10 {{(pid=62974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.560063] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.560241] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] quota.unified_limits_resource_strategy = require {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.560420] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.560584] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.560745] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.image_metadata_prefilter = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.560906] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.561086] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.max_attempts = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.561254] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.max_placement_results = 1000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.561416] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.561576] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.query_placement_for_image_type_support = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.561737] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.561908] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] scheduler.workers = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.562090] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.562262] 
env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.562437] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.562625] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.562807] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.562974] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.563153] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.563344] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.563513] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.host_subset_size = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.563680] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.563840] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.564011] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.564186] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.isolated_hosts = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.564349] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.isolated_images = [] 
{{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.564524] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.564719] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.564889] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.565069] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.pci_in_placement = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.565237] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.565400] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.565586] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.565755] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.565920] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.566094] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.566260] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.track_instance_changes = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.566438] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.566609] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] metrics.required = True {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.566775] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] metrics.weight_multiplier = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.566935] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.567111] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] metrics.weight_setting = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.567423] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.567595] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] serial_console.enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.567771] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] serial_console.port_range = 10000:20000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.567956] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.568158] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.568326] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] serial_console.serialproxy_port = 6083 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.568490] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.568659] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.auth_type = password {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.568818] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.568975] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.569149] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.collect_timing = False {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.569308] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.569464] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.569632] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.send_service_user_token = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.569795] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.569953] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] service_user.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.570145] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.agent_enabled = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.570309] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.570621] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.570828] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.570999] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.html5proxy_port = 6082 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.571177] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.image_compression = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.571339] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.jpeg_compression = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.571499] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.playback_compression = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.571661] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.require_secure = False {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.571831] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.server_listen = 127.0.0.1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.571999] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.572176] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.streaming_mode = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.572336] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] spice.zlib_compression = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.572504] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] upgrade_levels.baseapi = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.572702] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] upgrade_levels.compute = auto {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.572873] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] upgrade_levels.conductor = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.573044] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] upgrade_levels.scheduler = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.573216] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.573380] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.auth_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.573540] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.573697] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.573858] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.574025] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.insecure = False {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.574188] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.574350] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.574507] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vendordata_dynamic_auth.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.574723] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.api_retry_count = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.574886] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.ca_file = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.575072] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.cache_prefix = devstack-image-cache {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.575245] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.cluster_name = testcl1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.575413] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.connection_pool_size = 10 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.575594] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.console_delay_seconds = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.575776] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.datastore_regex = ^datastore.* {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.575988] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.576180] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.host_password = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.576349] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.host_port = 443 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.576526] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.host_username = administrator@vsphere.local {{(pid=62974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.576702] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.insecure = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.576893] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.integration_bridge = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.577044] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.maximum_objects = 100 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.577210] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.pbm_default_policy = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.577371] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.pbm_enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.577528] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.pbm_wsdl_location = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.577696] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.577856] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.serial_port_proxy_uri = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.578035] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.serial_port_service_uri = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.578223] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.task_poll_interval = 0.5 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.578399] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.use_linked_clone = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.578568] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.vnc_keymap = en-us {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.578735] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.vnc_port = 5900 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.578898] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vmware.vnc_port_total = 10000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.579097] 
env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.auth_schemes = ['none'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.579274] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.579573] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.579763] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.579933] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.novncproxy_port = 6080 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.580147] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.server_listen = 127.0.0.1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.580338] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.580501] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.vencrypt_ca_certs = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.580663] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.vencrypt_client_cert = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.580824] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vnc.vencrypt_client_key = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.581007] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.581180] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.disable_deep_image_inspection = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.581344] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.581505] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
486.581667] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.581828] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.disable_rootwrap = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.582049] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.enable_numa_live_migration = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.582248] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.582409] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.582584] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.582766] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.libvirt_disable_apic = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.582931] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.583111] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.583279] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.583442] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.583603] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.583767] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.583931] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.584608] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.584608] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.584608] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.584749] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.584843] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.client_socket_timeout = 900 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.584974] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.default_pool_size = 1000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.585141] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.keep_alive = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.585313] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.max_header_line = 16384 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.585507] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.secure_proxy_ssl_header = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.585687] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.ssl_ca_file = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.585855] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.ssl_cert_file = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.586027] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.ssl_key_file = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.586201] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] wsgi.tcp_keepidle = 600 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.586381] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.586549] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] zvm.ca_file = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.586710] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] zvm.cloud_connector_url = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.587013] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.587198] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] zvm.reachable_timeout = 300 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.587373] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.587558] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.587738] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.connection_string = messaging:// {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.587903] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.enabled = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.588098] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.es_doc_type = notification {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.588282] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.es_scroll_size = 10000 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.588452] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.es_scroll_time = 2m {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.588612] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.filter_error_trace = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.588776] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.hmac_keys = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.588941] env[62974]: DEBUG 
oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.sentinel_service_name = mymaster {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.589116] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.socket_timeout = 0.1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.589278] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.trace_requests = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.589436] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler.trace_sqlalchemy = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.589607] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler_jaeger.process_tags = {} {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.589765] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler_jaeger.service_name_prefix = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.589925] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] profiler_otlp.service_name_prefix = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.590100] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] remote_debug.host = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.590261] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] remote_debug.port = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.590438] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.590598] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.590761] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.590919] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.591089] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.591248] 
env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.591406] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.591569] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.591728] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.591895] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.592064] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.592235] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.592403] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.592576] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.592764] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.592925] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.593102] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.593274] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.593435] env[62974]: DEBUG oslo_service.service [None 
req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.593607] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.593776] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.593938] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.594112] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.594276] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.594437] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.594647] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.594804] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.594966] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.595144] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.595307] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.ssl = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.595478] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.595664] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.595843] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.596015] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.596188] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.ssl_version = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.596351] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.596534] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.596703] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_notifications.retry = -1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.596879] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.597059] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_messaging_notifications.transport_url = **** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.597235] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.auth_section = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.597400] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.auth_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.597560] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.cafile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.597720] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.certfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.597881] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.collect_timing = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.598067] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] 
oslo_limit.connect_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.598237] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.connect_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.598400] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.endpoint_id = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.598570] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.endpoint_interface = publicURL {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.598729] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.endpoint_override = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.598885] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.endpoint_region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.599054] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.endpoint_service_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.599215] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.endpoint_service_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.599377] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.insecure = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.599534] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.keyfile = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.599691] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.max_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.599848] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.min_version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.600012] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.region_name = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.600175] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.retriable_status_codes = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.600334] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.service_name = None {{(pid=62974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.600492] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.service_type = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.600653] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.split_loggers = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.600810] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.status_code_retries = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.600968] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.status_code_retry_delay = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.601141] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.timeout = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.601298] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.valid_interfaces = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.601454] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_limit.version = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.601618] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_reports.file_event_handler = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.601785] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.601945] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] oslo_reports.log_dir = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.602130] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.602291] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.602451] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.602654] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.602835] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.602998] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.603183] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.603343] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_ovs_privileged.group = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.603500] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.603665] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.603829] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.603988] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] vif_plug_ovs_privileged.user = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.604171] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.flat_interface = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.604347] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.604517] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.604722] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.604896] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.605122] 
env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.605343] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.605508] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.605706] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.605884] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_ovs.isolate_vif = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.606063] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.606234] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.606399] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.606567] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_ovs.ovsdb_interface = native {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.606727] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] os_vif_ovs.per_port_bridge = False {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.606897] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] privsep_osbrick.capabilities = [21] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.607067] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] privsep_osbrick.group = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.607227] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] privsep_osbrick.helper_command = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.607391] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
486.607555] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.607715] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] privsep_osbrick.user = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.607886] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.608055] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] nova_sys_admin.group = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.608219] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] nova_sys_admin.helper_command = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.608377] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.608538] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.608695] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] nova_sys_admin.user = None {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 486.608823] env[62974]: DEBUG oslo_service.service [None req-2d7a2c9b-ae84-4fe5-a708-45be59848c8d None None] ******************************************************************************** {{(pid=62974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 486.609254] env[62974]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 487.113311] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Getting list of instances from cluster (obj){ [ 487.113311] env[62974]: value = "domain-c8" [ 487.113311] env[62974]: _type = "ClusterComputeResource" [ 487.113311] env[62974]: } {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 487.114394] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b452487-cecc-4c84-90d3-8e9b62044d1f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 487.123834] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Got total of 0 instances {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 487.124461] env[62974]: WARNING nova.virt.vmwareapi.driver [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 487.124960] env[62974]: INFO nova.virt.node [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Generated node identity bd3bd9ae-180c-41cf-831e-3dd3892efa18 [ 487.125205] env[62974]: INFO nova.virt.node [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Wrote node identity bd3bd9ae-180c-41cf-831e-3dd3892efa18 to /opt/stack/data/n-cpu-1/compute_id [ 487.630233] env[62974]: WARNING nova.compute.manager [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Compute nodes ['bd3bd9ae-180c-41cf-831e-3dd3892efa18'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 488.638769] env[62974]: INFO nova.compute.manager [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 489.645361] env[62974]: WARNING nova.compute.manager [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 489.645711] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 489.645856] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 489.646015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 489.646167] env[62974]: DEBUG nova.compute.resource_tracker [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 489.647186] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc8155e-03b6-4f0f-b25b-c1c24d00c70e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.655827] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a54e22d-7cd1-433a-87cc-c66329a4bae3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.671458] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150057dc-4102-4849-9869-569d7e8be2da {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.677899] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9db46a-cf50-4738-b5f7-b7136e007ef7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.709017] env[62974]: DEBUG nova.compute.resource_tracker [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181164MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 489.709017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 489.709017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 490.208950] env[62974]: WARNING nova.compute.resource_tracker [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] No compute node record for cpu-1:bd3bd9ae-180c-41cf-831e-3dd3892efa18: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host bd3bd9ae-180c-41cf-831e-3dd3892efa18 could not be found. [ 490.712615] env[62974]: INFO nova.compute.resource_tracker [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: bd3bd9ae-180c-41cf-831e-3dd3892efa18 [ 492.220183] env[62974]: DEBUG nova.compute.resource_tracker [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 492.220562] env[62974]: DEBUG nova.compute.resource_tracker [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 492.491743] env[62974]: INFO nova.scheduler.client.report [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] [req-673aa8a1-2495-4159-8e17-7a067328ff91] Created resource provider record via placement API for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
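The inventory reported for this provider a moment later (VCPU total=48 with allocation_ratio=4.0; MEMORY_MB total=196590 with 512 reserved; DISK_GB total=400) fixes how much the scheduler may place on the node. As a rough illustration only, assuming the standard Placement capacity model of (total - reserved) * allocation_ratio (the sketch below is not Nova or Placement source code), the effective capacities implied by the logged values work out as follows:

# Illustrative sketch, not Nova/Placement code: derive schedulable capacity from
# the inventory values logged for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18,
# using the Placement rule capacity = (total - reserved) * allocation_ratio.

INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def schedulable_capacity(inventory: dict) -> dict:
    """Effective capacity Placement allows allocations against, per resource class."""
    return {
        rc: int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        for rc, inv in inventory.items()
    }

if __name__ == "__main__":
    for rc, cap in schedulable_capacity(INVENTORY).items():
        print(rc, cap)  # VCPU 192, MEMORY_MB 196078, DISK_GB 400

The max_unit values in the same inventory (16 for VCPU, 65530 for MEMORY_MB, 121 for DISK_GB) additionally cap what any single allocation may consume, so a 48-vCPU host with a 4.0 ratio still cannot satisfy one flavor asking for more than 16 vCPUs.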
[ 492.508812] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202966da-bd73-483f-b3cc-fc026171c6e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.517458] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1bd46b-1040-41ed-ab2e-ef7ec8746629 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.549133] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e289107e-b753-4283-a3a7-4c985f88c4d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.556770] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d8e05d-71c5-4302-8868-d4fc7021d974 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.570080] env[62974]: DEBUG nova.compute.provider_tree [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 493.115205] env[62974]: DEBUG nova.scheduler.client.report [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 493.115432] env[62974]: DEBUG nova.compute.provider_tree [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 0 to 1 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 493.115571] env[62974]: DEBUG nova.compute.provider_tree [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 493.165407] env[62974]: DEBUG nova.compute.provider_tree [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Updating 
resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 1 to 2 during operation: update_traits {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 493.671021] env[62974]: DEBUG nova.compute.resource_tracker [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 493.671021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.963s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 493.671021] env[62974]: DEBUG nova.service [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Creating RPC server for service compute {{(pid=62974) start /opt/stack/nova/nova/service.py:186}} [ 493.684745] env[62974]: DEBUG nova.service [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] Join ServiceGroup membership for this service compute {{(pid=62974) start /opt/stack/nova/nova/service.py:203}} [ 493.684929] env[62974]: DEBUG nova.servicegroup.drivers.db [None req-cf21b999-d2fc-4fa4-bf2e-0de8e7a95304 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62974) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 513.688517] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.191472] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Getting list of instances from cluster (obj){ [ 514.191472] env[62974]: value = "domain-c8" [ 514.191472] env[62974]: _type = "ClusterComputeResource" [ 514.191472] env[62974]: } {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 514.192685] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e0190d-83b9-4022-a1d2-b8e2ed215ca9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.201520] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Got total of 0 instances {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 514.201731] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.202030] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Getting list of instances from cluster (obj){ [ 514.202030] env[62974]: value = "domain-c8" [ 514.202030] env[62974]: _type = "ClusterComputeResource" [ 514.202030] env[62974]: } {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 514.202872] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412decb1-f037-4bf4-8daa-cc79e94a1f81 
{{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.210463] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Got total of 0 instances {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 531.777635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.777635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.280207] env[62974]: DEBUG nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 532.817779] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.818047] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.819600] env[62974]: INFO nova.compute.claims [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.246423] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.247896] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.751067] env[62974]: DEBUG nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 533.883017] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c533f494-f359-4c66-b702-da414dac93bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.891208] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ea0a3c-6f44-4a37-a4f1-6a864733d549 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.931908] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac808c0-0063-4c08-9ec8-7f71e68941f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.940228] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ade2e7-0e86-4d96-9b74-5bb50242c4c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.956159] env[62974]: DEBUG nova.compute.provider_tree [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.277336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.460264] env[62974]: DEBUG nova.scheduler.client.report [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 534.968019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.148s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.968019] env[62974]: DEBUG nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 534.970439] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.693s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.972170] env[62974]: INFO nova.compute.claims [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.477467] env[62974]: DEBUG nova.compute.utils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 535.478680] env[62974]: DEBUG nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 535.478840] env[62974]: DEBUG nova.network.neutron [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 535.608164] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.608502] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.805067] env[62974]: DEBUG nova.policy [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f563d2ef3444b77b3d0fa15328d78b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ae52d42e1b04ef890523d2b5834a5de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 535.825011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.825011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.987940] env[62974]: DEBUG nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 536.098332] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df3b269-6b35-48c4-a392-e74fe4fee86a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.109810] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a44f9f-deee-42e5-8682-d464b783e3e4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.112254] env[62974]: DEBUG nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 536.155433] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0510abe2-4581-4196-9938-9fc5c35a9baf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.164121] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c26cdd2-6204-4734-bb1c-ced39ba3214f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.180724] env[62974]: DEBUG nova.compute.provider_tree [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.330065] env[62974]: DEBUG nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 536.643240] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.684390] env[62974]: DEBUG nova.scheduler.client.report [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 536.762738] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.763479] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.863154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.997259] env[62974]: DEBUG nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 537.039830] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 537.040132] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.040294] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 537.040814] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.040814] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 537.040814] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 537.041100] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 537.041149] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 537.043805] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 
tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 537.044641] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 537.044641] env[62974]: DEBUG nova.virt.hardware [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 537.049377] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca86d67b-d2e6-4f7e-88eb-eec5e2110f6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.061022] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ed5c35-d080-434d-99ac-6621c8e556d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.080178] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b666111e-f98f-4e9b-8c87-24d183c2ee8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.196290] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.198565] env[62974]: DEBUG nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 537.199429] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.556s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.200942] env[62974]: INFO nova.compute.claims [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 537.269902] env[62974]: DEBUG nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 537.685509] env[62974]: DEBUG nova.network.neutron [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Successfully created port: 947659a6-f0ce-4065-a591-6a15666e4ac5 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.709028] env[62974]: DEBUG nova.compute.utils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 537.709028] env[62974]: DEBUG nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 537.709028] env[62974]: DEBUG nova.network.neutron [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 537.805232] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.125308] env[62974]: DEBUG nova.policy [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc5ab26774ed4f098ed9b2c733763b80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e40d31e6ca74d9c913e2ac2ae32f84c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 538.216117] env[62974]: DEBUG nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 538.321476] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be38b7f9-ac1e-4142-86f4-a104111171bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.329416] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bb43b1-9463-4588-80b6-f3392a2275ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.363508] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf90181-2e92-479c-8b89-09fbccdd322f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.371139] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742225cb-5c29-4970-b3cc-f83000515177 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.384794] env[62974]: DEBUG nova.compute.provider_tree [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.887958] env[62974]: DEBUG nova.scheduler.client.report [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 539.229454] env[62974]: DEBUG nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 539.264194] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 539.264464] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.264732] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 539.264803] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.265057] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 539.265977] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 539.266231] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 539.266595] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 539.266595] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 539.266665] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 539.266909] env[62974]: DEBUG nova.virt.hardware [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 539.267705] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce50bd07-736f-47ce-bf9b-13181ea37b95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.277882] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f00df6e-8922-4c3b-b138-74e25eb2f668 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.397675] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.198s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.400691] env[62974]: DEBUG nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Start building networks asynchronously for instance. 
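The topology lines above (1 vCPU, limits of 65536 sockets/cores/threads, exactly one possible VirtCPUTopology) amount to enumerating factorisations of the vCPU count within the limits. A toy enumeration in the same spirit; this is not Nova's hardware.py code, just a sketch of the idea.

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Every (sockets, cores, threads) whose product is exactly vcpus."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)], as in the log above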
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 539.403568] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.541s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.405037] env[62974]: INFO nova.compute.claims [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.556931] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "2313468e-820f-4fff-bdeb-5d542c94584d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.556931] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "2313468e-820f-4fff-bdeb-5d542c94584d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.911542] env[62974]: DEBUG nova.compute.utils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 539.914611] env[62974]: DEBUG nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 539.914862] env[62974]: DEBUG nova.network.neutron [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 539.940324] env[62974]: DEBUG nova.network.neutron [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Successfully created port: 4f09f936-5667-4bf0-8972-a2531e87aaee {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.059104] env[62974]: DEBUG nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Starting instance... 
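The lockutils entries above ("acquired ... waited 2.541s", "released ... held 2.198s") come from serialising resource claims on the named "compute_resources" lock. A minimal sketch of that pattern with oslo.concurrency; the claim body and return value are placeholders, not the resource tracker's logic.

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim(instance_uuid):
        # Runs with the named lock held; lockutils emits the acquired/released
        # DEBUG lines with the waited/held timings seen above.
        return {"instance": instance_uuid, "claimed": True}

    print(instance_claim("572c2c5f-6a24-4532-9c80-d76017e4aaa1"))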
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 540.203034] env[62974]: DEBUG nova.policy [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0cf8258ab4b48eeb4ffb83a4e68c29b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8d8c428903e42ad9b1612d48d00bcaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 540.416946] env[62974]: DEBUG nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 540.542735] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2777d031-a1ac-41ee-880b-94e55a57016b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.552150] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef15c1d-3801-4a9a-b676-4a9f621e72da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.610634] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223115b6-b9e6-46d7-9afd-cf7d8213cd8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.626099] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830e2b22-af0d-4c32-a6ae-8aea4e18b465 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.639857] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.650628] env[62974]: DEBUG nova.compute.provider_tree [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.154610] env[62974]: DEBUG nova.scheduler.client.report [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 541.409174] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.409415] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.429019] env[62974]: DEBUG nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 541.462870] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 541.463489] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.463489] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 541.463489] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 
tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 541.464612] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 541.464612] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 541.464847] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 541.465115] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 541.465491] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 541.465559] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 541.465788] env[62974]: DEBUG nova.virt.hardware [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 541.470131] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b79c9f-d52e-422b-b17c-f41f6bcea1a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.478296] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb007442-decc-4234-820f-43395b8afdd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.485916] env[62974]: DEBUG nova.network.neutron [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Successfully created port: 
cf420179-c3b4-4a7d-bf15-a2bcdac8faae {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 541.502653] env[62974]: DEBUG nova.network.neutron [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Successfully updated port: 947659a6-f0ce-4065-a591-6a15666e4ac5 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 541.665172] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.665957] env[62974]: DEBUG nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 541.671388] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.867s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.678023] env[62974]: INFO nova.compute.claims [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 541.918027] env[62974]: DEBUG nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 542.010701] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.010701] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.010701] env[62974]: DEBUG nova.network.neutron [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 542.016682] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.016682] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.016682] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 542.016682] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Rebuilding the list of instances to heal {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 542.182547] env[62974]: DEBUG nova.compute.utils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.189777] env[62974]: DEBUG nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 542.189986] env[62974]: DEBUG nova.network.neutron [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 542.440958] env[62974]: DEBUG nova.policy [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf269a93e9d64b87a135c3e207ce1466', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13053a9449eb4b14a13ad720083975db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 542.452974] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.519915] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 542.520164] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 542.520308] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 542.520971] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 542.521174] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 542.521307] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Didn't find any instances for network info cache update. 
{{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 542.522164] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.522440] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.522631] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.522816] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.522997] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.523216] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.523397] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 542.523724] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.685389] env[62974]: DEBUG nova.network.neutron [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.694546] env[62974]: DEBUG nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Start building block device mappings for instance. 
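The run of "Running periodic task ComputeManager._*" lines above is oslo.service's periodic-task machinery walking the registered tasks. A stripped-down sketch of how such tasks are declared and driven; the task body, spacing and run_immediately flag are invented for the example.

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _heal_instance_info_cache(self, context):
            print("healing instance info cache")

    Manager().run_periodic_tasks(context=None)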
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 542.860129] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed87d62-1be1-4f58-bafc-dbd730ff73b3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.872470] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75607ce-7c07-4bfb-a567-9dfe16ff4df6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.915238] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a53760-429f-4e8a-b5f1-1b8f0aadcb45 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.931322] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827f08a6-79d5-4dd6-b560-7d5f4fe566c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.949482] env[62974]: DEBUG nova.compute.provider_tree [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.027180] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.230158] env[62974]: DEBUG nova.compute.manager [req-a15a66f3-1f6f-42e2-a62e-f936cc419bf2 req-965fcd8c-ad0e-4e43-b86c-11ec70f7e86d service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Received event network-vif-plugged-947659a6-f0ce-4065-a591-6a15666e4ac5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 543.230158] env[62974]: DEBUG oslo_concurrency.lockutils [req-a15a66f3-1f6f-42e2-a62e-f936cc419bf2 req-965fcd8c-ad0e-4e43-b86c-11ec70f7e86d service nova] Acquiring lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.230158] env[62974]: DEBUG oslo_concurrency.lockutils [req-a15a66f3-1f6f-42e2-a62e-f936cc419bf2 req-965fcd8c-ad0e-4e43-b86c-11ec70f7e86d service nova] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.230158] env[62974]: DEBUG oslo_concurrency.lockutils [req-a15a66f3-1f6f-42e2-a62e-f936cc419bf2 req-965fcd8c-ad0e-4e43-b86c-11ec70f7e86d service nova] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.230158] env[62974]: DEBUG nova.compute.manager [req-a15a66f3-1f6f-42e2-a62e-f936cc419bf2 
req-965fcd8c-ad0e-4e43-b86c-11ec70f7e86d service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] No waiting events found dispatching network-vif-plugged-947659a6-f0ce-4065-a591-6a15666e4ac5 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 543.230387] env[62974]: WARNING nova.compute.manager [req-a15a66f3-1f6f-42e2-a62e-f936cc419bf2 req-965fcd8c-ad0e-4e43-b86c-11ec70f7e86d service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Received unexpected event network-vif-plugged-947659a6-f0ce-4065-a591-6a15666e4ac5 for instance with vm_state building and task_state spawning. [ 543.345278] env[62974]: DEBUG nova.network.neutron [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [{"id": "947659a6-f0ce-4065-a591-6a15666e4ac5", "address": "fa:16:3e:f1:cd:d9", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947659a6-f0", "ovs_interfaceid": "947659a6-f0ce-4065-a591-6a15666e4ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.454033] env[62974]: DEBUG nova.scheduler.client.report [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 543.487581] env[62974]: DEBUG nova.network.neutron [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Successfully updated port: 4f09f936-5667-4bf0-8972-a2531e87aaee {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 543.714675] env[62974]: DEBUG nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Start 
spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 543.756278] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 543.759416] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.759648] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 543.759857] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.760010] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 543.760163] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 543.760387] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 543.760558] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 543.761418] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 543.761853] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 543.762301] env[62974]: DEBUG nova.virt.hardware [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 543.764159] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24d6f49-b166-48d3-863c-d216d8e8cb4a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.779240] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bce02c5-1a91-4e9f-b51e-d7b50b8d5bc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.852843] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.853200] env[62974]: DEBUG nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Instance network_info: |[{"id": "947659a6-f0ce-4065-a591-6a15666e4ac5", "address": "fa:16:3e:f1:cd:d9", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947659a6-f0", "ovs_interfaceid": "947659a6-f0ce-4065-a591-6a15666e4ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 543.853700] 
env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:cd:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '947659a6-f0ce-4065-a591-6a15666e4ac5', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 543.869667] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 543.869911] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee04abee-4365-4974-a4c2-2b93a394f1cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.883514] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Created folder: OpenStack in parent group-v4. [ 543.883514] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Creating folder: Project (7ae52d42e1b04ef890523d2b5834a5de). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 543.883970] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-424f3706-4cd6-45d4-a5f1-4a3de197050d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.895775] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Created folder: Project (7ae52d42e1b04ef890523d2b5834a5de) in parent group-v535199. [ 543.897311] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Creating folder: Instances. Parent ref: group-v535200. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 543.897311] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-311ae613-572e-4ace-b383-6c1c0c7af786 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.907427] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Created folder: Instances in parent group-v535200. [ 543.907427] env[62974]: DEBUG oslo.service.loopingcall [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 543.907427] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 543.907427] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbbeef31-f6ef-40fe-8725-7fe755999863 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.935342] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 543.935342] env[62974]: value = "task-2653587" [ 543.935342] env[62974]: _type = "Task" [ 543.935342] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.947710] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653587, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.958733] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.959316] env[62974]: DEBUG nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 543.962450] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.323s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.963883] env[62974]: INFO nova.compute.claims [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.992009] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "refresh_cache-572c2c5f-6a24-4532-9c80-d76017e4aaa1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.992362] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired lock "refresh_cache-572c2c5f-6a24-4532-9c80-d76017e4aaa1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.992781] env[62974]: DEBUG nova.network.neutron [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 544.032281] env[62974]: DEBUG nova.network.neutron [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Successfully created port: ecdf9d70-caf4-4804-80f1-953a7ce00868 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 544.446949] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653587, 'name': CreateVM_Task, 'duration_secs': 0.338129} completed successfully. 
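The CreateVM_Task above (queued, polled at 6%, completed in roughly 0.34s) follows the usual oslo.vmware invoke-then-wait flow. A hedged sketch of that flow under stated assumptions: the folder, config spec and resource pool arguments, and the commented-out session parameters, are placeholders rather than values from this deployment.

    from oslo_vmware import api

    def create_vm(session, vm_folder, config_spec, resource_pool):
        # Submit the vSphere task, then block while oslo.vmware polls it,
        # which produces the "Task: ... progress is N%" lines seen above.
        task = session.invoke_api(session.vim, "CreateVM_Task", vm_folder,
                                  config=config_spec, pool=resource_pool)
        return session.wait_for_task(task)

    # A session would normally be created once per driver, e.g.:
    # session = api.VMwareAPISession("vc.example.org", "user", "password",
    #                                api_retry_count=10, task_poll_interval=0.5)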
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 544.447066] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 544.464325] env[62974]: DEBUG oslo_vmware.service [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310fc925-903d-40e5-a868-d40cee10d229 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.469706] env[62974]: DEBUG nova.compute.utils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.475890] env[62974]: DEBUG nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 544.479204] env[62974]: DEBUG nova.network.neutron [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 544.488898] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.489087] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.489784] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 544.491647] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "a8446718-f2df-4bad-b5e3-537f19daa823" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.491936] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef 
tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "a8446718-f2df-4bad-b5e3-537f19daa823" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.493567] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a32b332c-083d-4e1c-926f-bd750c56eacc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.514363] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 544.514363] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5261e4eb-069f-960b-de1f-6c94abe64030" [ 544.514363] env[62974]: _type = "Task" [ 544.514363] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.525766] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5261e4eb-069f-960b-de1f-6c94abe64030, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.534704] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "2a498460-fced-410b-8b33-3595a2ac6753" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.534994] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2a498460-fced-410b-8b33-3595a2ac6753" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.673142] env[62974]: DEBUG nova.network.neutron [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Instance cache missing network info. 
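Each "Invoking <Object>.<Method> with opID=..." entry above is one SOAP round trip issued through oslo.vmware, and the "Waiting for the task: (returnval){ value = session[...] ... _type = Task } to complete" block is its polling counterpart. A sketch of that call pattern, assuming a reachable vCenter; the credentials, the datastore-browser moref value, and the datastore path are placeholders:

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Constructing the session logs in to vCenter immediately (this is what
# produced the "Successfully established new session" line earlier).
session = vmware_api.VMwareAPISession(
    'vc.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# Methods that return a Task managed object (e.g. SearchDatastore_Task) are
# started with invoke_api() and then polled with wait_for_task(), which is
# what emits the "progress is 0%." lines.
browser = vim_util.get_moref('datastoreBrowser-123', 'HostDatastoreBrowser')
task = session.invoke_api(session.vim, 'SearchDatastore_Task', browser,
                          datastorePath='[datastore2] devstack-image-cache_base')
result = session.wait_for_task(task)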
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.747958] env[62974]: DEBUG nova.policy [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc3547f8a76a4c9999ee063bd1f8e3a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60e8dbd2e69944b5b8383fb0daa8968f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 544.980755] env[62974]: DEBUG nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 545.009176] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 545.025511] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.025511] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 545.025511] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.025629] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.026043] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
545.027055] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ee863e4-0450-49d9-91e3-aae99d425c8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.042876] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 545.046900] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 545.047104] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 545.056025] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102249e9-4c9e-4f3d-ac83-86d8ff3b58da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.060024] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b1dc725-091b-433c-80d1-942392733d3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.069593] env[62974]: DEBUG nova.network.neutron [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Successfully updated port: cf420179-c3b4-4a7d-bf15-a2bcdac8faae {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 545.073707] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 545.073707] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5234af2b-2fbb-2354-b7db-804ede37a192" [ 545.073707] env[62974]: _type = "Task" [ 545.073707] env[62974]: } to complete. 
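The "Policy check for network:attach_external_network failed" entry logged just above is oslo.policy evaluating a Nova policy rule against the request credentials (roles member/reader only). A sketch of that check with oslo.policy; the 'role:admin' rule string is an assumption standing in for whatever Nova's real default is:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '60e8dbd2e69944b5b8383fb0daa8968f',
         'user_id': 'fc3547f8a76a4c9999ee063bd1f8e3a2'}

# do_raise=False returns a boolean instead of raising PolicyNotAuthorized;
# with only member/reader roles the admin-only rule evaluates to False,
# matching the "failed with credentials" log line.
allowed = enforcer.enforce('network:attach_external_network',
                           target={}, creds=creds, do_raise=False)
print(allowed)  # False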
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.092221] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Preparing fetch location {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 545.092221] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Creating directory with path [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 545.092221] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81db61bf-da43-4515-85b9-4e14044b3299 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.112957] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Created directory with path [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 545.112957] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Fetch image to [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 545.113108] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Downloading image file data 807f8582-499f-47ee-9d5b-755c9f39bc39 to [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk on the data store datastore2 {{(pid=62974) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 545.113926] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b68f45e-73b5-4bc0-9e2d-82c8ef515f93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.136538] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be8e543-506f-4bb1-b642-148632bc5aae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.156504] env[62974]: DEBUG nova.network.neutron [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Updating instance_info_cache with network_info: [{"id": "4f09f936-5667-4bf0-8972-a2531e87aaee", "address": "fa:16:3e:99:65:68", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": 
"shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f09f936-56", "ovs_interfaceid": "4f09f936-5667-4bf0-8972-a2531e87aaee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.160216] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1beeb1c6-9d26-42a2-a72f-dc28f2a4bcad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.206215] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec7f86e-8548-4c79-9388-5e49d3eb1f02 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.213960] env[62974]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4f47ee9e-4857-4242-bcf9-e94745df906b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.247462] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Downloading image file data 807f8582-499f-47ee-9d5b-755c9f39bc39 to the data store datastore2 {{(pid=62974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 545.311196] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06660d1-d823-4b98-886d-12c7a023b01c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.322548] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37da5e9-783e-4564-9f2d-c547c52dfa19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.369906] env[62974]: DEBUG oslo_vmware.rw_handles [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 545.373376] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e55e09-aee1-4462-9bfe-7715ea3480b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.381537] env[62974]: DEBUG nova.compute.manager [req-2bd4e8bd-dd14-4d20-98f7-6573a01ab5e7 req-b0d32ab1-5ad5-4473-8ece-dfc001e2cc84 service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Received event network-vif-plugged-4f09f936-5667-4bf0-8972-a2531e87aaee {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 545.381738] env[62974]: DEBUG oslo_concurrency.lockutils [req-2bd4e8bd-dd14-4d20-98f7-6573a01ab5e7 req-b0d32ab1-5ad5-4473-8ece-dfc001e2cc84 service nova] Acquiring lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.381934] env[62974]: DEBUG oslo_concurrency.lockutils [req-2bd4e8bd-dd14-4d20-98f7-6573a01ab5e7 req-b0d32ab1-5ad5-4473-8ece-dfc001e2cc84 service nova] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.382407] env[62974]: DEBUG oslo_concurrency.lockutils [req-2bd4e8bd-dd14-4d20-98f7-6573a01ab5e7 req-b0d32ab1-5ad5-4473-8ece-dfc001e2cc84 service nova] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.382589] env[62974]: DEBUG nova.compute.manager [req-2bd4e8bd-dd14-4d20-98f7-6573a01ab5e7 req-b0d32ab1-5ad5-4473-8ece-dfc001e2cc84 service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] No waiting events found dispatching network-vif-plugged-4f09f936-5667-4bf0-8972-a2531e87aaee {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 545.382748] env[62974]: WARNING nova.compute.manager [req-2bd4e8bd-dd14-4d20-98f7-6573a01ab5e7 req-b0d32ab1-5ad5-4473-8ece-dfc001e2cc84 service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Received unexpected event network-vif-plugged-4f09f936-5667-4bf0-8972-a2531e87aaee for instance with vm_state building and task_state spawning. 
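The warning above ("Received unexpected event network-vif-plugged-4f09f936-... for instance with vm_state building and task_state spawning") is emitted when Neutron's vif-plugged notification arrives before the driver has registered a waiter for it. A conceptual sketch of that bookkeeping, with hypothetical helper names:

import threading
from collections import defaultdict

_waiters = defaultdict(dict)   # instance_uuid -> {event_key: threading.Event}


def prepare_for_event(instance_uuid, event_key):
    ev = threading.Event()
    _waiters[instance_uuid][event_key] = ev
    return ev


def pop_instance_event(instance_uuid, event_key):
    ev = _waiters[instance_uuid].pop(event_key, None)
    if ev is None:
        # No waiter registered yet: this is the "unexpected event" case.
        print('Received unexpected event %s for %s' % (event_key, instance_uuid))
    else:
        ev.set()


# Normal spawn-side usage: register before plugging the VIF, then wait.
ev = prepare_for_event('572c2c5f-6a24-4532-9c80-d76017e4aaa1',
                       'network-vif-plugged-4f09f936-5667-4bf0-8972-a2531e87aaee')
pop_instance_event('572c2c5f-6a24-4532-9c80-d76017e4aaa1',
                   'network-vif-plugged-4f09f936-5667-4bf0-8972-a2531e87aaee')
ev.wait(timeout=1)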
[ 545.385697] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.385697] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.448302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e768e106-e7d2-4ce0-961b-d98e37849dbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.469207] env[62974]: DEBUG nova.compute.provider_tree [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.543375] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.570886] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.573990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "refresh_cache-124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.573990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquired lock "refresh_cache-124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.573990] env[62974]: DEBUG nova.network.neutron [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Building network info cache for instance {{(pid=62974) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 545.668117] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Releasing lock "refresh_cache-572c2c5f-6a24-4532-9c80-d76017e4aaa1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.668117] env[62974]: DEBUG nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Instance network_info: |[{"id": "4f09f936-5667-4bf0-8972-a2531e87aaee", "address": "fa:16:3e:99:65:68", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f09f936-56", "ovs_interfaceid": "4f09f936-5667-4bf0-8972-a2531e87aaee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 545.668280] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:65:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f09f936-5667-4bf0-8972-a2531e87aaee', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 545.678206] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Creating folder: Project (4e40d31e6ca74d9c913e2ac2ae32f84c). Parent ref: group-v535199. 
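Right above, the Neutron network_info blob for port 4f09f936-5667-4bf0-8972-a2531e87aaee is flattened into the much smaller "Instance VIF info" dict that the VMware driver feeds into the VM config spec. A hypothetical helper showing that mapping, with values taken from the log entry:

def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],          # e.g. 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }


example_vif = {
    'id': '4f09f936-5667-4bf0-8972-a2531e87aaee',
    'address': 'fa:16:3e:99:65:68',
    'network': {'bridge': 'br-int'},
    'details': {'nsx-logical-switch-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877'},
}
print(vif_info_from_network_info(example_vif))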
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 545.681127] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22311869-f9cb-48cb-bca7-f5bf6cb4fc26 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.697367] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Created folder: Project (4e40d31e6ca74d9c913e2ac2ae32f84c) in parent group-v535199. [ 545.697367] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Creating folder: Instances. Parent ref: group-v535203. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 545.697367] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfae8cfe-d257-48ba-af46-ef8b731c6ae2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.706147] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Created folder: Instances in parent group-v535203. [ 545.706459] env[62974]: DEBUG oslo.service.loopingcall [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 545.707051] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 545.707051] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53dd5f8e-f497-460b-875d-a30d1db0c942 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.731872] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 545.731872] env[62974]: value = "task-2653590" [ 545.731872] env[62974]: _type = "Task" [ 545.731872] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.741895] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653590, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.943271] env[62974]: DEBUG nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Starting instance... 
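The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above comes from oslo.service's looping-call machinery, and the repeated "Task: {...} progress is N%." entries are its polling iterations. A sketch of that pattern, with get_task_progress standing in for the real VMware task lookup:

import itertools

from oslo_service import loopingcall

_progress = itertools.chain([0, 40, 80], itertools.repeat(100))


def get_task_progress(task_id):
    return next(_progress)


def _poll(task_id):
    progress = get_task_progress(task_id)
    print('Task %s progress is %d%%.' % (task_id, progress))
    if progress == 100:
        # Raising LoopingCallDone stops the timer and hands back a result.
        raise loopingcall.LoopingCallDone(retvalue='task result')


timer = loopingcall.FixedIntervalLoopingCall(_poll, 'task-2653590')
result = timer.start(interval=0.5).wait()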
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 545.973275] env[62974]: DEBUG nova.scheduler.client.report [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 545.994314] env[62974]: DEBUG nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 546.043885] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 546.044258] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.044376] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 546.044851] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.044851] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 546.045468] env[62974]: DEBUG nova.virt.hardware [None 
req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 546.045722] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 546.045935] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 546.046335] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 546.046543] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 546.046852] env[62974]: DEBUG nova.virt.hardware [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 546.048146] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30b2715-0f5a-49a4-9d38-530c2cecd038 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.060216] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65da3548-6f34-41ef-b609-dc2113f32cfe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.084015] env[62974]: DEBUG oslo_vmware.rw_handles [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Completed reading data from the image iterator. {{(pid=62974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 546.084015] env[62974]: DEBUG oslo_vmware.rw_handles [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
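The nova.virt.hardware lines above enumerate CPU topologies for the 1-vCPU m1.nano flavor under effectively unlimited socket/core/thread maxima and end up with the single option 1:1:1. An illustrative re-implementation of that enumeration (not Nova's actual code):

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    # List every (sockets, cores, threads) triple whose product equals the
    # flavor's vcpu count, within the given limits.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies


print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- the single topology logged
print(possible_cpu_topologies(4))   # (1, 4, 1), (2, 2, 1), (4, 1, 1), ...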
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 546.155312] env[62974]: DEBUG nova.network.neutron [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 546.247893] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653590, 'name': CreateVM_Task, 'duration_secs': 0.359124} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.247893] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 546.249066] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.249066] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.249066] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 546.249066] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f09d8726-17cb-45ec-a9b7-99bb6a6d87fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.251399] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Downloaded image file data 807f8582-499f-47ee-9d5b-755c9f39bc39 to vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk on the data store datastore2 {{(pid=62974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 546.253081] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Caching image {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 546.253322] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] 
Copying Virtual Disk [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk to [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 546.253953] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f819bcbd-d871-47d0-a058-86b69644cad3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.260464] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 546.260464] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521e62cf-4583-12ee-31f4-c14307f72b65" [ 546.260464] env[62974]: _type = "Task" [ 546.260464] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.271756] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521e62cf-4583-12ee-31f4-c14307f72b65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.273904] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 546.273904] env[62974]: value = "task-2653591" [ 546.273904] env[62974]: _type = "Task" [ 546.273904] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.286099] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653591, 'name': CopyVirtualDisk_Task} progress is 0%. 
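After the one-time download, the driver promotes the image into the cache with VirtualDiskManager.CopyVirtualDisk_Task and later removes the temporary tmp-sparse.vmdk with FileManager.DeleteDatastoreFile_Task (visible a little further on). A sketch of those two calls through an existing oslo_vmware session; the parameter spellings follow the vSphere SDK, and the helper itself is hypothetical:

def cache_image_copy(session, dc_ref, tmp_path, cached_path):
    # session is an oslo_vmware VMwareAPISession, dc_ref a datacenter moref.
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=tmp_path, sourceDatacenter=dc_ref,
                              destName=cached_path, destDatacenter=dc_ref)
    session.wait_for_task(task)

    # Clean up the temporary sparse file once the cached copy exists.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=tmp_path, datacenter=dc_ref)
    session.wait_for_task(task)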
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.320089] env[62974]: DEBUG nova.network.neutron [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Successfully created port: 7a90f0f7-f944-4a9c-84ff-875c8ea990b9 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.474422] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.482438] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.482532] env[62974]: DEBUG nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 546.489935] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.037s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.490977] env[62974]: INFO nova.compute.claims [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.688263] env[62974]: DEBUG nova.network.neutron [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Updating instance_info_cache with network_info: [{"id": "cf420179-c3b4-4a7d-bf15-a2bcdac8faae", "address": "fa:16:3e:a0:a3:12", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf420179-c3", "ovs_interfaceid": "cf420179-c3b4-4a7d-bf15-a2bcdac8faae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.780977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.782320] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 546.782320] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.787062] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653591, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.839201] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "001557f9-ea50-4e86-9eeb-dd4436791453" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.839201] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "001557f9-ea50-4e86-9eeb-dd4436791453" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.001137] env[62974]: DEBUG nova.compute.utils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.007365] env[62974]: DEBUG nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 547.007434] env[62974]: DEBUG nova.network.neutron [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 547.167432] env[62974]: DEBUG nova.network.neutron [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Successfully updated port: ecdf9d70-caf4-4804-80f1-953a7ce00868 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 547.197234] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Releasing lock "refresh_cache-124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.197234] env[62974]: DEBUG nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Instance network_info: |[{"id": "cf420179-c3b4-4a7d-bf15-a2bcdac8faae", "address": "fa:16:3e:a0:a3:12", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf420179-c3", "ovs_interfaceid": "cf420179-c3b4-4a7d-bf15-a2bcdac8faae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 547.198220] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:a3:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf420179-c3b4-4a7d-bf15-a2bcdac8faae', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 547.206699] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Creating folder: Project (c8d8c428903e42ad9b1612d48d00bcaa). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 547.207809] env[62974]: DEBUG nova.policy [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cfd67943e52542518f760aee13e11de7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '552a7db99e8a4daaba912b390a4d1982', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 547.209732] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1534c3c4-e551-4fd6-9f86-801753c3a9bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.221107] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Created folder: Project (c8d8c428903e42ad9b1612d48d00bcaa) in parent group-v535199. [ 547.221418] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Creating folder: Instances. Parent ref: group-v535206. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 547.221762] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5eedc69-dc89-4825-b924-97be7ae72580 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.232432] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Created folder: Instances in parent group-v535206. [ 547.232432] env[62974]: DEBUG oslo.service.loopingcall [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.233703] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 547.233703] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c59c0a1b-3523-4357-8546-c03a43f3b0e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.254135] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 547.254135] env[62974]: value = "task-2653594" [ 547.254135] env[62974]: _type = "Task" [ 547.254135] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.262721] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653594, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.284917] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.835635} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.285308] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Copied Virtual Disk [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk to [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 547.285782] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleting the datastore file [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 547.285782] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98ef06e9-77f9-46f1-baf0-9cd4eaab0d79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.291986] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 547.291986] env[62974]: value = "task-2653595" [ 547.291986] env[62974]: _type = "Task" [ 547.291986] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.300167] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653595, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.307339] env[62974]: DEBUG nova.compute.manager [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Received event network-changed-947659a6-f0ce-4065-a591-6a15666e4ac5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 547.307658] env[62974]: DEBUG nova.compute.manager [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Refreshing instance network info cache due to event network-changed-947659a6-f0ce-4065-a591-6a15666e4ac5. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 547.307899] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Acquiring lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.307999] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Acquired lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.308196] env[62974]: DEBUG nova.network.neutron [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Refreshing network info cache for port 947659a6-f0ce-4065-a591-6a15666e4ac5 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 547.511458] env[62974]: DEBUG nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 547.675594] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "refresh_cache-2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.675594] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired lock "refresh_cache-2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.675594] env[62974]: DEBUG nova.network.neutron [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 547.769735] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653594, 'name': CreateVM_Task, 'duration_secs': 0.382588} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.770454] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 547.771114] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.771486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.772402] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 547.772402] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3705b84e-69cc-492a-b473-a1e6bb9ec8e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.782019] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 547.782019] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a25143-6246-3463-dfea-ce8516078b66" [ 547.782019] env[62974]: _type = "Task" [ 547.782019] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.802061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.802061] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 547.802061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.807157] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022691} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.807398] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 547.807622] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Moving file from [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21/807f8582-499f-47ee-9d5b-755c9f39bc39 to [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39. {{(pid=62974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 547.807887] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-25ecb304-e8bb-4c65-8d4c-aef47727f0e4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.815134] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 547.815134] env[62974]: value = "task-2653596" [ 547.815134] env[62974]: _type = "Task" [ 547.815134] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.827741] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653596, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.853296] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61639d12-45c9-420b-baa4-0868332c90af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.862675] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b8c6f7-a1c4-4d3b-a436-4cb2623b53a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.897024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa168068-0632-459b-ba2c-3c02d30979ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.904948] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b9b19d-f072-45bd-8a0c-75f170430b20 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.921441] env[62974]: DEBUG nova.compute.provider_tree [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.250621] env[62974]: DEBUG nova.network.neutron [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Successfully created port: 227bb3d2-feed-4b53-8666-bc56eb0c3d3e {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 548.326313] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653596, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.032945} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.326313] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] File moved {{(pid=62974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 548.327903] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Cleaning up location [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 548.327903] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleting the datastore file [datastore2] vmware_temp/ab5df7f8-40c5-4fe8-868b-6778abf6eb21 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 548.327903] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7bd3a37a-d4aa-42a2-832e-2a0e44f11fbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.333954] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 548.333954] env[62974]: value = "task-2653597" [ 548.333954] env[62974]: _type = "Task" [ 548.333954] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.343890] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653597, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.424775] env[62974]: DEBUG nova.scheduler.client.report [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 548.437079] env[62974]: DEBUG nova.network.neutron [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.533159] env[62974]: DEBUG nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 548.559176] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 548.559176] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.559176] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 548.559391] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.559391] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 548.559391] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 548.559391] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 548.559391] env[62974]: 
DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 548.559839] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 548.560320] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 548.560590] env[62974]: DEBUG nova.virt.hardware [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 548.561933] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73aefc0-d910-4aa0-ac92-1bffe2fda1dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.573015] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665f1355-bfc1-41c7-862a-cc0c226bedb3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.691679] env[62974]: DEBUG nova.compute.manager [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Received event network-changed-4f09f936-5667-4bf0-8972-a2531e87aaee {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 548.692306] env[62974]: DEBUG nova.compute.manager [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Refreshing instance network info cache due to event network-changed-4f09f936-5667-4bf0-8972-a2531e87aaee. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 548.692406] env[62974]: DEBUG oslo_concurrency.lockutils [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] Acquiring lock "refresh_cache-572c2c5f-6a24-4532-9c80-d76017e4aaa1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.692597] env[62974]: DEBUG oslo_concurrency.lockutils [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] Acquired lock "refresh_cache-572c2c5f-6a24-4532-9c80-d76017e4aaa1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.692763] env[62974]: DEBUG nova.network.neutron [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Refreshing network info cache for port 4f09f936-5667-4bf0-8972-a2531e87aaee {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 548.763898] env[62974]: DEBUG nova.network.neutron [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updated VIF entry in instance network info cache for port 947659a6-f0ce-4065-a591-6a15666e4ac5. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 548.763898] env[62974]: DEBUG nova.network.neutron [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [{"id": "947659a6-f0ce-4065-a591-6a15666e4ac5", "address": "fa:16:3e:f1:cd:d9", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947659a6-f0", "ovs_interfaceid": "947659a6-f0ce-4065-a591-6a15666e4ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.845609] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653597, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024848} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.846209] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 548.847189] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-261c5af9-adb2-4c26-ae7e-ef8cd7bfd412 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.853155] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 548.853155] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521f70af-f8de-6668-0906-8a77c5c94b47" [ 548.853155] env[62974]: _type = "Task" [ 548.853155] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.862364] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521f70af-f8de-6668-0906-8a77c5c94b47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.881286] env[62974]: DEBUG nova.network.neutron [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Updating instance_info_cache with network_info: [{"id": "ecdf9d70-caf4-4804-80f1-953a7ce00868", "address": "fa:16:3e:8d:02:d4", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecdf9d70-ca", "ovs_interfaceid": "ecdf9d70-caf4-4804-80f1-953a7ce00868", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.935026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.935026] env[62974]: DEBUG nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 548.936593] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.909s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.937016] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.937273] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 548.937741] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.394s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.940092] env[62974]: INFO nova.compute.claims [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.949083] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d93c815-2e2e-4ffa-9b5f-da2234081f66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.957334] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cbe1c6-e41c-4168-ac92-265fac68635d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.973649] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34d790c-8072-4eb6-9e91-56dceea68e56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.983430] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8ac13d-7cdc-482f-864e-bebdff0d5c5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.016412] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181169MB 
free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 549.016412] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.059438] env[62974]: DEBUG nova.network.neutron [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Successfully updated port: 7a90f0f7-f944-4a9c-84ff-875c8ea990b9 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 549.266354] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Releasing lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.266622] env[62974]: DEBUG nova.compute.manager [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Received event network-vif-plugged-cf420179-c3b4-4a7d-bf15-a2bcdac8faae {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 549.267819] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Acquiring lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.267819] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.267819] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.267819] env[62974]: DEBUG nova.compute.manager [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] No waiting events found dispatching network-vif-plugged-cf420179-c3b4-4a7d-bf15-a2bcdac8faae {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 549.267819] env[62974]: WARNING nova.compute.manager [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Received unexpected event network-vif-plugged-cf420179-c3b4-4a7d-bf15-a2bcdac8faae for instance with vm_state building and task_state spawning. 
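Editor's note: the repeated "Acquiring lock" / "acquired" / "released" DEBUG lines above (the resource tracker's "compute_resources" lock, the per-instance "refresh_cache-<uuid>" locks) come from oslo.concurrency's lockutils wrappers, which Nova uses to serialize access to shared state. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the lock names are copied from the log, but the function bodies are placeholders, not Nova's code.

from oslo_concurrency import lockutils

INSTANCE_UUID = "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d"  # taken from the log above

def refresh_network_cache(instance_uuid):
    # lockutils.lock() is a context manager: entering it emits the
    # "Acquiring lock ..." / "... acquired ..." DEBUG lines, leaving it
    # emits "... released ...", exactly as seen above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # placeholder for the actual cache-refresh work

@lockutils.synchronized("compute_resources")
def update_available_resource():
    # Decorator form: callers are serialized the same way the resource
    # tracker's "compute_resources" lock serializes claims in the log.
    pass

if __name__ == "__main__":
    refresh_network_cache(INSTANCE_UUID)
    update_available_resource()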
[ 549.269228] env[62974]: DEBUG nova.compute.manager [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Received event network-changed-cf420179-c3b4-4a7d-bf15-a2bcdac8faae {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 549.269228] env[62974]: DEBUG nova.compute.manager [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Refreshing instance network info cache due to event network-changed-cf420179-c3b4-4a7d-bf15-a2bcdac8faae. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 549.269228] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Acquiring lock "refresh_cache-124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.269228] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Acquired lock "refresh_cache-124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.269228] env[62974]: DEBUG nova.network.neutron [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Refreshing network info cache for port cf420179-c3b4-4a7d-bf15-a2bcdac8faae {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 549.329489] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "cf73422d-7f4b-4bae-9d69-de74d7211243" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.331268] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.364681] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521f70af-f8de-6668-0906-8a77c5c94b47, 'name': SearchDatastore_Task, 'duration_secs': 0.008964} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.364939] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.365210] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] d8b7a39f-ec73-4a87-9b1e-9428ca72f895/d8b7a39f-ec73-4a87-9b1e-9428ca72f895.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 549.365519] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.365714] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 549.365919] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4ed84d3-1bba-4f92-a656-fa9b903b9bbc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.368246] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c23ab08e-a947-412c-b2a0-ed334ecb5f08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.374587] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 549.374587] env[62974]: value = "task-2653598" [ 549.374587] env[62974]: _type = "Task" [ 549.374587] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.378892] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 549.379228] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 549.380120] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4af4bb46-eaae-4554-9574-85047a7ac8ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.384914] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.387199] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Releasing lock "refresh_cache-2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.387308] env[62974]: DEBUG nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Instance network_info: |[{"id": "ecdf9d70-caf4-4804-80f1-953a7ce00868", "address": "fa:16:3e:8d:02:d4", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecdf9d70-ca", "ovs_interfaceid": "ecdf9d70-caf4-4804-80f1-953a7ce00868", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 549.388989] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:02:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecdf9d70-caf4-4804-80f1-953a7ce00868', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 549.399133] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Creating folder: Project 
(13053a9449eb4b14a13ad720083975db). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 549.399133] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 549.399133] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5231c04e-067a-313e-d9fa-00d05792151d" [ 549.399133] env[62974]: _type = "Task" [ 549.399133] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.403209] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e67552d7-72d3-4a2b-9e99-766c1626a6c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.415240] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5231c04e-067a-313e-d9fa-00d05792151d, 'name': SearchDatastore_Task, 'duration_secs': 0.008315} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.416608] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5069d205-db4c-4bae-81c1-283f45ffa0af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.419187] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Created folder: Project (13053a9449eb4b14a13ad720083975db) in parent group-v535199. [ 549.419367] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Creating folder: Instances. Parent ref: group-v535209. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 549.419598] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b3de0b6-dd0c-4c5d-bfb2-44021096f216 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.424023] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 549.424023] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a51664-1620-24e5-f7ab-5916313a63cd" [ 549.424023] env[62974]: _type = "Task" [ 549.424023] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.428558] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Created folder: Instances in parent group-v535209. 
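Editor's note: the blocks of the form "Waiting for the task: (returnval){ value = "task-…" } to complete" followed by "progress is N%" and "completed successfully" are produced by oslo.vmware's task polling (wait_for_task/_poll_task), which re-reads the vCenter task's state on a fixed interval until it reaches a terminal state; the surrounding "Waiting for function … to return" lines are the oslo.service looping call that drives it. A minimal sketch of the polling idea, with a hypothetical get_task_info() helper standing in for the vCenter call; this is not oslo.vmware's actual implementation.

import itertools
import time

POLL_INTERVAL = 0.5  # seconds; an assumption, not the deployment's task_poll_interval

def wait_for_task(get_task_info, poll_interval=POLL_INTERVAL):
    """Poll a task until it reaches a terminal state, like _poll_task in the log."""
    while True:
        info = get_task_info()                    # e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            return info.get("result")             # the "completed successfully" case
        if info["state"] == "error":
            raise RuntimeError("task failed: %s" % info.get("error"))
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)

if __name__ == "__main__":
    # Fake task that finishes on the third poll, just to exercise the loop.
    states = itertools.chain(
        [{"state": "running", "progress": 0},
         {"state": "running", "progress": 50}],
        itertools.repeat({"state": "success", "result": "vm-123"}))
    print(wait_for_task(lambda: next(states), poll_interval=0.01))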
[ 549.428786] env[62974]: DEBUG oslo.service.loopingcall [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.429358] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 549.429540] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-412bde98-1b32-4f41-80dc-6c8d843fe34b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.452893] env[62974]: DEBUG nova.compute.utils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.454111] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a51664-1620-24e5-f7ab-5916313a63cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.455084] env[62974]: DEBUG nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 549.455253] env[62974]: DEBUG nova.network.neutron [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 549.459512] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 549.459512] env[62974]: value = "task-2653601" [ 549.459512] env[62974]: _type = "Task" [ 549.459512] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.471727] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653601, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.563685] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "refresh_cache-8f4faa77-4f18-41da-b8d0-efba799d6ec6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.563976] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquired lock "refresh_cache-8f4faa77-4f18-41da-b8d0-efba799d6ec6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.564079] env[62974]: DEBUG nova.network.neutron [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 549.566744] env[62974]: DEBUG nova.policy [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3dbf4170c4b4c58adabcef28bbab5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd546e5faf230414aa1cb1cb08bcc6bcc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 549.788794] env[62974]: DEBUG nova.network.neutron [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Updated VIF entry in instance network info cache for port 4f09f936-5667-4bf0-8972-a2531e87aaee. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 549.789146] env[62974]: DEBUG nova.network.neutron [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Updating instance_info_cache with network_info: [{"id": "4f09f936-5667-4bf0-8972-a2531e87aaee", "address": "fa:16:3e:99:65:68", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f09f936-56", "ovs_interfaceid": "4f09f936-5667-4bf0-8972-a2531e87aaee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.889102] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495965} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.889701] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] d8b7a39f-ec73-4a87-9b1e-9428ca72f895/d8b7a39f-ec73-4a87-9b1e-9428ca72f895.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 549.890199] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 549.891190] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a95975ff-d518-47dd-90f9-934392bfc9a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.899718] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 549.899718] env[62974]: value = "task-2653602" [ 549.899718] env[62974]: _type = "Task" [ 549.899718] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.912695] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.936316] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a51664-1620-24e5-f7ab-5916313a63cd, 'name': SearchDatastore_Task, 'duration_secs': 0.008205} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.939331] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.940093] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 572c2c5f-6a24-4532-9c80-d76017e4aaa1/572c2c5f-6a24-4532-9c80-d76017e4aaa1.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 549.940238] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.940433] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 549.940710] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbe2f8a7-0201-4b73-93a9-e4e3ff6254be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.944448] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a865e96c-d249-48bc-ae1a-42a3f7ad11e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.951820] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 549.951820] env[62974]: value = 
"task-2653603" [ 549.951820] env[62974]: _type = "Task" [ 549.951820] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.957449] env[62974]: DEBUG nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 549.964477] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 549.964677] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 549.971228] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-887eaf9f-e58e-4a95-8962-6c8d22c781a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.977330] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.988625] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653601, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.989071] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 549.989071] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5265b4e7-65fa-f7ff-2ad0-e1c74d07385f" [ 549.989071] env[62974]: _type = "Task" [ 549.989071] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.001462] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5265b4e7-65fa-f7ff-2ad0-e1c74d07385f, 'name': SearchDatastore_Task, 'duration_secs': 0.009426} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.002391] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ab20e20-d1ae-4b54-9872-2f1a3ab5c6b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.017611] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 550.017611] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a10517-2334-ead3-58af-8d385800ea56" [ 550.017611] env[62974]: _type = "Task" [ 550.017611] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.035022] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a10517-2334-ead3-58af-8d385800ea56, 'name': SearchDatastore_Task, 'duration_secs': 0.007812} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.035022] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.035022] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d/124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 550.035022] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7cbe1187-a076-43aa-8e4b-cd6b4f36e574 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.041914] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 550.041914] env[62974]: value = "task-2653604" [ 550.041914] env[62974]: _type = "Task" [ 550.041914] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.056957] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653604, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.150886] env[62974]: DEBUG nova.network.neutron [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.205368] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9c7e79-4240-45fe-98cb-9b808daef8e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.216058] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4060f1fb-1a99-4922-9a63-9c3d0ee93606 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.255256] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad5dad5-05de-4145-b141-1514e9311fea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.263441] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6979660-7eb9-422f-9e1b-5f63a0f1f1a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.277474] env[62974]: DEBUG nova.compute.provider_tree [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.294377] env[62974]: DEBUG oslo_concurrency.lockutils [req-2bd720d3-c14a-49e8-a2fe-af344563b29d req-5d8a7cdf-8188-482a-b4b9-9c0c40072c8b service nova] Releasing lock "refresh_cache-572c2c5f-6a24-4532-9c80-d76017e4aaa1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.295384] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "7f0d367d-9d60-414b-990e-56a2b43fd963" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.295636] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.398635] env[62974]: DEBUG nova.network.neutron [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Updating instance_info_cache with network_info: [{"id": "7a90f0f7-f944-4a9c-84ff-875c8ea990b9", 
"address": "fa:16:3e:2f:6f:3c", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a90f0f7-f9", "ovs_interfaceid": "7a90f0f7-f944-4a9c-84ff-875c8ea990b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.409165] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080906} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.409431] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 550.410324] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7223c3fa-a4a5-4664-b998-4481a305aab8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.434897] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] d8b7a39f-ec73-4a87-9b1e-9428ca72f895/d8b7a39f-ec73-4a87-9b1e-9428ca72f895.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 550.436105] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-473609b8-ae67-48bb-b2ba-44378bd3ae1e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.458838] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 550.458838] env[62974]: value = "task-2653605" [ 550.458838] env[62974]: _type = "Task" [ 550.458838] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.470621] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440465} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.471138] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 572c2c5f-6a24-4532-9c80-d76017e4aaa1/572c2c5f-6a24-4532-9c80-d76017e4aaa1.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 550.475516] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 550.475516] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-046f680e-e4d3-4b8f-97ed-4b243ac4653e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.481886] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.493499] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653601, 'name': CreateVM_Task, 'duration_secs': 0.535338} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.493704] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 550.494480] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.494634] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.494965] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 550.495731] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-869b0a63-0355-451a-832f-c1dc2cfe1b74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.501242] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 550.501242] env[62974]: value = "task-2653606" [ 550.501242] env[62974]: _type = "Task" [ 550.501242] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.502939] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 550.502939] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52084d5d-3026-a0d6-02aa-e0b267e1ef53" [ 550.502939] env[62974]: _type = "Task" [ 550.502939] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.513633] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653606, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.518566] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52084d5d-3026-a0d6-02aa-e0b267e1ef53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.559301] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653604, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.682553] env[62974]: DEBUG nova.network.neutron [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Updated VIF entry in instance network info cache for port cf420179-c3b4-4a7d-bf15-a2bcdac8faae. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 550.684141] env[62974]: DEBUG nova.network.neutron [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Updating instance_info_cache with network_info: [{"id": "cf420179-c3b4-4a7d-bf15-a2bcdac8faae", "address": "fa:16:3e:a0:a3:12", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf420179-c3", "ovs_interfaceid": "cf420179-c3b4-4a7d-bf15-a2bcdac8faae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.780747] env[62974]: DEBUG nova.scheduler.client.report [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
550.820517] env[62974]: DEBUG nova.network.neutron [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Successfully created port: a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.875196] env[62974]: DEBUG nova.compute.manager [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Received event network-vif-plugged-ecdf9d70-caf4-4804-80f1-953a7ce00868 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 550.875196] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] Acquiring lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.876138] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.877864] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.003s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.878051] env[62974]: DEBUG nova.compute.manager [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] No waiting events found dispatching network-vif-plugged-ecdf9d70-caf4-4804-80f1-953a7ce00868 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 550.878298] env[62974]: WARNING nova.compute.manager [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Received unexpected event network-vif-plugged-ecdf9d70-caf4-4804-80f1-953a7ce00868 for instance with vm_state building and task_state spawning. [ 550.878533] env[62974]: DEBUG nova.compute.manager [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Received event network-changed-ecdf9d70-caf4-4804-80f1-953a7ce00868 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 550.878814] env[62974]: DEBUG nova.compute.manager [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Refreshing instance network info cache due to event network-changed-ecdf9d70-caf4-4804-80f1-953a7ce00868. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 550.879138] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] Acquiring lock "refresh_cache-2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.879556] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] Acquired lock "refresh_cache-2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.879803] env[62974]: DEBUG nova.network.neutron [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Refreshing network info cache for port ecdf9d70-caf4-4804-80f1-953a7ce00868 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 550.902235] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Releasing lock "refresh_cache-8f4faa77-4f18-41da-b8d0-efba799d6ec6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.902632] env[62974]: DEBUG nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Instance network_info: |[{"id": "7a90f0f7-f944-4a9c-84ff-875c8ea990b9", "address": "fa:16:3e:2f:6f:3c", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a90f0f7-f9", "ovs_interfaceid": "7a90f0f7-f944-4a9c-84ff-875c8ea990b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 550.903714] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:6f:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a90f0f7-f944-4a9c-84ff-875c8ea990b9', 'vif_model': 'vmxnet3'}] {{(pid=62974) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 550.913244] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Creating folder: Project (60e8dbd2e69944b5b8383fb0daa8968f). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 550.915066] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce539fec-70b3-4256-a4f8-ec62eeb2454c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.925855] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Created folder: Project (60e8dbd2e69944b5b8383fb0daa8968f) in parent group-v535199. [ 550.926057] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Creating folder: Instances. Parent ref: group-v535212. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 550.926287] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23d478f9-9823-45d4-b94f-ab69b3005c20 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.936664] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Created folder: Instances in parent group-v535212. [ 550.936913] env[62974]: DEBUG oslo.service.loopingcall [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.937353] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 550.938243] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4aecdf3-0177-4ac0-bf05-39d7d2dbec88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.965972] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 550.965972] env[62974]: value = "task-2653609" [ 550.965972] env[62974]: _type = "Task" [ 550.965972] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.976635] env[62974]: DEBUG nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 550.978817] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653605, 'name': ReconfigVM_Task, 'duration_secs': 0.485157} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.980661] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Reconfigured VM instance instance-00000001 to attach disk [datastore2] d8b7a39f-ec73-4a87-9b1e-9428ca72f895/d8b7a39f-ec73-4a87-9b1e-9428ca72f895.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 550.980661] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73016f36-8225-42fb-a7e8-c4a55dbe4b3e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.986779] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653609, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.994516] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 550.994516] env[62974]: value = "task-2653610" [ 550.994516] env[62974]: _type = "Task" [ 550.994516] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.007663] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653610, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.032194] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52084d5d-3026-a0d6-02aa-e0b267e1ef53, 'name': SearchDatastore_Task, 'duration_secs': 0.052247} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.036560] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.037276] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 551.037276] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.037276] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.037457] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 551.037683] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103604} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.040058] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 551.040276] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.040432] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 551.040586] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.040712] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 551.040846] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 551.041063] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 551.041220] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 551.041379] env[62974]: DEBUG nova.virt.hardware [None 
req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 551.042047] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 551.042047] env[62974]: DEBUG nova.virt.hardware [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 551.042047] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16849a70-1bf8-4788-a342-e69b40b733f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.044778] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 551.045667] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757f3afc-12e8-40f7-a59e-325bdaec7f0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.051168] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e40066a-b572-41d9-bdf7-055afdffc05a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.075505] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc162f15-b942-47d2-8724-b5a2fb187b8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.088886] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 572c2c5f-6a24-4532-9c80-d76017e4aaa1/572c2c5f-6a24-4532-9c80-d76017e4aaa1.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 551.090763] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c89a570-367f-45f4-98e7-225d43748961 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.107243] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 551.107243] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 551.110180] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f0fb98f-ad5d-4d17-95e1-aeea51f27ea4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.113095] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.702517} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.121753] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d/124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 551.121996] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 551.122969] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a809a9c-c60e-4fba-86b6-cebdb9684eb5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.127710] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 551.127710] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52010599-542c-1266-ff8d-4a440800c5be" [ 551.127710] env[62974]: _type = "Task" [ 551.127710] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.127973] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 551.127973] env[62974]: value = "task-2653611" [ 551.127973] env[62974]: _type = "Task" [ 551.127973] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.138849] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 551.138849] env[62974]: value = "task-2653612" [ 551.138849] env[62974]: _type = "Task" [ 551.138849] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.151145] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52010599-542c-1266-ff8d-4a440800c5be, 'name': SearchDatastore_Task, 'duration_secs': 0.013875} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.151145] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653611, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.153544] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0e408f5-f6ab-45b9-a6a9-48e535c80b44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.158419] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653612, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.161763] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 551.161763] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b84b25-1075-4c2c-3e7a-6555b0ac26e2" [ 551.161763] env[62974]: _type = "Task" [ 551.161763] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.171516] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b84b25-1075-4c2c-3e7a-6555b0ac26e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.187187] env[62974]: DEBUG oslo_concurrency.lockutils [req-8ee136b2-2350-48ad-a8a4-6cac25cf091a req-a60dedf5-349f-4d14-8c01-c2e2376844c5 service nova] Releasing lock "refresh_cache-124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.290665] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.291646] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 551.294696] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.724s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.295886] env[62974]: INFO nova.compute.claims [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.456238] env[62974]: DEBUG nova.network.neutron [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Successfully updated port: 227bb3d2-feed-4b53-8666-bc56eb0c3d3e {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 551.476985] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653609, 'name': CreateVM_Task, 'duration_secs': 0.373922} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.479270] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 551.479929] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.480116] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.480407] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 551.480931] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7576d70-f7e9-4c26-999d-145be2e527c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.485672] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 551.485672] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd13f2-7fac-f35b-0e0a-334e807d6aa3" [ 551.485672] env[62974]: _type = "Task" [ 551.485672] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.493838] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd13f2-7fac-f35b-0e0a-334e807d6aa3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.501223] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653610, 'name': Rename_Task, 'duration_secs': 0.150828} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.501511] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 551.501858] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b83b0f51-4d3c-4f0a-a78b-0db2903b583e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.508485] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 551.508485] env[62974]: value = "task-2653613" [ 551.508485] env[62974]: _type = "Task" [ 551.508485] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.516342] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.638161] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653611, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.647356] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653612, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078194} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.647490] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 551.648289] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6d6f97-3e04-43d0-8daf-1fdf57ffffa1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.670806] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d/124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 551.670806] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c887a70a-6b6e-4d64-a0a5-dd1f8a3a086c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.697225] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b84b25-1075-4c2c-3e7a-6555b0ac26e2, 'name': SearchDatastore_Task, 'duration_secs': 0.020062} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.698634] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.698910] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e/2174cb7d-3e73-4529-b9f8-735dd6dfcf4e.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 551.699281] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 551.699281] env[62974]: value = "task-2653614" [ 551.699281] env[62974]: _type = "Task" [ 551.699281] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.699723] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c959b252-bf4b-458f-8bbe-0c3f507bd708 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.709759] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653614, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.714806] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 551.714806] env[62974]: value = "task-2653615" [ 551.714806] env[62974]: _type = "Task" [ 551.714806] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.726294] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.800505] env[62974]: DEBUG nova.compute.utils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.804922] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 551.805124] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 551.959513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "refresh_cache-2313468e-820f-4fff-bdeb-5d542c94584d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.959600] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquired lock "refresh_cache-2313468e-820f-4fff-bdeb-5d542c94584d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.960877] env[62974]: DEBUG nova.network.neutron [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 551.972763] env[62974]: DEBUG nova.policy [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d977aa355614e2bbab76080bbe411b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c540bb1bb0e4e86a6e067653ae20895', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 551.999037] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd13f2-7fac-f35b-0e0a-334e807d6aa3, 'name': SearchDatastore_Task, 'duration_secs': 0.023926} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.999037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.999293] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 552.002318] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.002318] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.002318] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 552.002318] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45c576f7-a448-4f56-99f2-ca94712c9ca6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.016236] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 552.016236] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 552.017462] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-267162e0-19b9-43d0-a33f-ea7d02e80f76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.024256] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653613, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.028495] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 552.028495] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523497ee-99ad-e6c5-01d6-f16d1231e050" [ 552.028495] env[62974]: _type = "Task" [ 552.028495] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.037742] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523497ee-99ad-e6c5-01d6-f16d1231e050, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.066768] env[62974]: DEBUG nova.network.neutron [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Updated VIF entry in instance network info cache for port ecdf9d70-caf4-4804-80f1-953a7ce00868. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 552.066768] env[62974]: DEBUG nova.network.neutron [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Updating instance_info_cache with network_info: [{"id": "ecdf9d70-caf4-4804-80f1-953a7ce00868", "address": "fa:16:3e:8d:02:d4", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecdf9d70-ca", "ovs_interfaceid": "ecdf9d70-caf4-4804-80f1-953a7ce00868", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.141957] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653611, 'name': ReconfigVM_Task, 'duration_secs': 0.73506} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.142295] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 572c2c5f-6a24-4532-9c80-d76017e4aaa1/572c2c5f-6a24-4532-9c80-d76017e4aaa1.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 552.142977] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef7f8101-38be-488e-9065-ba2f8d2956b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.149833] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 552.149833] env[62974]: value = "task-2653616" [ 552.149833] env[62974]: _type = "Task" [ 552.149833] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.158963] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653616, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.219805] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653614, 'name': ReconfigVM_Task, 'duration_secs': 0.330712} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.224354] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d/124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 552.224930] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d14ee5f-17e9-4a24-90cd-daf3ad5dafcd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.233375] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653615, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.235163] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 552.235163] env[62974]: value = "task-2653617" [ 552.235163] env[62974]: _type = "Task" [ 552.235163] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.244598] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653617, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.247405] env[62974]: DEBUG nova.compute.manager [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Received event network-vif-plugged-7a90f0f7-f944-4a9c-84ff-875c8ea990b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 552.247707] env[62974]: DEBUG oslo_concurrency.lockutils [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] Acquiring lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.247707] env[62974]: DEBUG oslo_concurrency.lockutils [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.247984] env[62974]: DEBUG oslo_concurrency.lockutils [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.248220] env[62974]: DEBUG nova.compute.manager [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] No waiting events found dispatching network-vif-plugged-7a90f0f7-f944-4a9c-84ff-875c8ea990b9 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 552.248317] env[62974]: WARNING nova.compute.manager [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Received unexpected event network-vif-plugged-7a90f0f7-f944-4a9c-84ff-875c8ea990b9 for instance with vm_state building and task_state spawning. [ 552.248780] env[62974]: DEBUG nova.compute.manager [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Received event network-changed-7a90f0f7-f944-4a9c-84ff-875c8ea990b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 552.248961] env[62974]: DEBUG nova.compute.manager [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Refreshing instance network info cache due to event network-changed-7a90f0f7-f944-4a9c-84ff-875c8ea990b9. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 552.249182] env[62974]: DEBUG oslo_concurrency.lockutils [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] Acquiring lock "refresh_cache-8f4faa77-4f18-41da-b8d0-efba799d6ec6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.249310] env[62974]: DEBUG oslo_concurrency.lockutils [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] Acquired lock "refresh_cache-8f4faa77-4f18-41da-b8d0-efba799d6ec6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.249488] env[62974]: DEBUG nova.network.neutron [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Refreshing network info cache for port 7a90f0f7-f944-4a9c-84ff-875c8ea990b9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 552.306411] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 552.521261] env[62974]: DEBUG oslo_vmware.api [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653613, 'name': PowerOnVM_Task, 'duration_secs': 0.518153} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.521424] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 552.523372] env[62974]: INFO nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Took 15.52 seconds to spawn the instance on the hypervisor. [ 552.523372] env[62974]: DEBUG nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 552.523372] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70937c09-2d22-4db4-b555-3f72995d9015 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.532213] env[62974]: DEBUG nova.network.neutron [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.553257] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523497ee-99ad-e6c5-01d6-f16d1231e050, 'name': SearchDatastore_Task, 'duration_secs': 0.086543} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.554687] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45a7c9f6-cfc8-40f1-9f06-3e85c6d7eae2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.559826] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 552.559826] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5229a002-d669-3bcf-e840-82f75c550008" [ 552.559826] env[62974]: _type = "Task" [ 552.559826] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.574508] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c7210e-4ba3-4efa-9bd0-9120dfbe5d11 req-29afc184-ff22-4bde-a76d-0e66608a3121 service nova] Releasing lock "refresh_cache-2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.575650] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5229a002-d669-3bcf-e840-82f75c550008, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.595847] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9a5d14-0111-406a-9adf-83496050ab1e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.604777] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6e95b2-cc2f-4ad7-b89c-f5fcb6c1066d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.643186] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccd9571-705b-4edb-840f-c9aa04ad9fe1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.655125] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d34916-802e-45c5-a15f-b531d2ea63f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.664272] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653616, 'name': Rename_Task, 'duration_secs': 0.325155} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.672053] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 552.672549] env[62974]: DEBUG nova.compute.provider_tree [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.674393] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3017f92-8c6c-4eb6-80b9-0b8e908f4706 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.683647] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 552.683647] env[62974]: value = "task-2653618" [ 552.683647] env[62974]: _type = "Task" [ 552.683647] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.693442] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.732117] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680104} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.733201] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e/2174cb7d-3e73-4529-b9f8-735dd6dfcf4e.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 552.733201] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 552.733201] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bb6a2ec-dfda-4496-a396-a7dfda6fb48e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.741628] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 552.741628] env[62974]: value = "task-2653619" [ 552.741628] env[62974]: _type = "Task" [ 552.741628] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.756185] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653617, 'name': Rename_Task, 'duration_secs': 0.25092} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.757079] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 552.758243] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34de25d1-7d42-47c1-bdd8-bf035fdd8db9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.762599] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653619, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.769392] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 552.769392] env[62974]: value = "task-2653620" [ 552.769392] env[62974]: _type = "Task" [ 552.769392] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.781177] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.901941] env[62974]: DEBUG nova.network.neutron [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Updating instance_info_cache with network_info: [{"id": "227bb3d2-feed-4b53-8666-bc56eb0c3d3e", "address": "fa:16:3e:fe:5f:80", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap227bb3d2-fe", "ovs_interfaceid": "227bb3d2-feed-4b53-8666-bc56eb0c3d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.922395] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Successfully created port: 8f4af602-edfd-46cd-8684-cff88d420350 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.056404] env[62974]: INFO nova.compute.manager [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Took 20.27 seconds to build instance. [ 553.072452] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5229a002-d669-3bcf-e840-82f75c550008, 'name': SearchDatastore_Task, 'duration_secs': 0.009009} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.072737] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.072994] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8f4faa77-4f18-41da-b8d0-efba799d6ec6/8f4faa77-4f18-41da-b8d0-efba799d6ec6.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 553.073263] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ed859a3-4435-4fd2-bffe-70373906b81d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.082163] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 553.082163] env[62974]: value = "task-2653621" [ 553.082163] env[62974]: _type = "Task" [ 553.082163] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.095098] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653621, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.178175] env[62974]: DEBUG nova.scheduler.client.report [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 553.201735] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653618, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.228666] env[62974]: DEBUG nova.network.neutron [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Updated VIF entry in instance network info cache for port 7a90f0f7-f944-4a9c-84ff-875c8ea990b9. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 553.229232] env[62974]: DEBUG nova.network.neutron [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Updating instance_info_cache with network_info: [{"id": "7a90f0f7-f944-4a9c-84ff-875c8ea990b9", "address": "fa:16:3e:2f:6f:3c", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a90f0f7-f9", "ovs_interfaceid": "7a90f0f7-f944-4a9c-84ff-875c8ea990b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.253267] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070061} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.253666] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 553.254387] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae77cc3-86c0-4f60-b7dd-2157c4c3621b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.278948] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e/2174cb7d-3e73-4529-b9f8-735dd6dfcf4e.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 553.279318] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3fd7231-6fd3-46e0-a5fb-ddf6c18b166b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.304399] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653620, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.305867] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 553.305867] env[62974]: value = "task-2653622" [ 553.305867] env[62974]: _type = "Task" [ 553.305867] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.314641] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653622, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.320941] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 553.357505] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 553.357731] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.357989] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 553.358648] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.358854] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 553.359431] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 553.359431] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 553.359431] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 553.360121] env[62974]: DEBUG nova.virt.hardware [None 
req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 553.360121] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 553.360290] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 553.361882] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443887a8-72e8-408c-8732-4f196fbd7192 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.370677] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764b6784-9d1a-4b47-a9bd-95ddf90e1037 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.405030] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Releasing lock "refresh_cache-2313468e-820f-4fff-bdeb-5d542c94584d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.405485] env[62974]: DEBUG nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Instance network_info: |[{"id": "227bb3d2-feed-4b53-8666-bc56eb0c3d3e", "address": "fa:16:3e:fe:5f:80", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap227bb3d2-fe", "ovs_interfaceid": "227bb3d2-feed-4b53-8666-bc56eb0c3d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 553.405992] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 
tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:5f:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '227bb3d2-feed-4b53-8666-bc56eb0c3d3e', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 553.413633] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Creating folder: Project (552a7db99e8a4daaba912b390a4d1982). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 553.413926] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1cec16f-2212-4efe-a39c-5f4477c82388 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.424562] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Created folder: Project (552a7db99e8a4daaba912b390a4d1982) in parent group-v535199. [ 553.424769] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Creating folder: Instances. Parent ref: group-v535215. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 553.425023] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76b2cb00-67bf-44e7-a2fd-1ac540811af8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.435381] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Created folder: Instances in parent group-v535215. [ 553.435715] env[62974]: DEBUG oslo.service.loopingcall [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.435933] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 553.436222] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2932296d-da66-406e-bb61-97db052c355a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.463344] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 553.463344] env[62974]: value = "task-2653625" [ 553.463344] env[62974]: _type = "Task" [ 553.463344] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.472880] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653625, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.536377] env[62974]: DEBUG nova.network.neutron [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Successfully updated port: a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 553.561139] env[62974]: DEBUG oslo_concurrency.lockutils [None req-02d70f1c-ce1b-4e2f-9220-07991b54dbe1 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.783s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.595785] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653621, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.688026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.688026] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 553.690068] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.216s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.694232] env[62974]: INFO nova.compute.claims [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.710862] env[62974]: DEBUG oslo_vmware.api [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653618, 'name': PowerOnVM_Task, 'duration_secs': 0.519682} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.711139] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 553.711386] env[62974]: INFO nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Took 14.48 seconds to spawn the instance on the hypervisor. [ 553.711562] env[62974]: DEBUG nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 553.713057] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bc3c87-58b7-4578-a97b-2b4cf185234d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.732412] env[62974]: DEBUG oslo_concurrency.lockutils [req-d8a9cecc-fe19-4c69-8f93-8773ac6e0de4 req-f174cfbf-c23b-44a7-ba18-9c0432ad36e4 service nova] Releasing lock "refresh_cache-8f4faa77-4f18-41da-b8d0-efba799d6ec6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.793566] env[62974]: DEBUG oslo_vmware.api [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653620, 'name': PowerOnVM_Task, 'duration_secs': 0.709574} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.794096] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 553.794351] env[62974]: INFO nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Took 12.37 seconds to spawn the instance on the hypervisor. 
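Annotation: the recurring `wait_for_task` / `_poll_task` entries above and below all follow one pattern: oslo.vmware schedules a fixed-interval poll of the vCenter task (via `oslo_service.loopingcall`, which also appears in these logs) and logs the task's progress until it reaches a terminal state, at which point "completed successfully" is emitted. Below is a minimal sketch of that control flow only; `get_task_info()` is a hypothetical stand-in for the PropertyCollector query the real `oslo_vmware.api.VMwareAPISession.wait_for_task` performs, so this is an illustration of the pattern, not the driver's implementation.

```python
# Sketch of the poll-until-done loop behind the "Task: {...} progress is N%."
# and "completed successfully" lines. get_task_info() is a hypothetical
# helper simulating a vCenter task that finishes after a few polls.
import itertools

from oslo_service import loopingcall

# Simulated task progress: a few in-progress polls, then success.
_progress = itertools.chain([0, 25, 66, 99], itertools.repeat(100))


def get_task_info(task_id):
    """Hypothetical helper returning (state, progress) for a task."""
    pct = next(_progress)
    return ("success" if pct == 100 else "running", pct)


def _poll_task(task_id):
    state, pct = get_task_info(task_id)
    print(f"Task: {task_id} progress is {pct}%.")
    if state == "success":
        # Raising LoopingCallDone stops the loop; its value becomes the
        # return value of evt.wait() below.
        raise loopingcall.LoopingCallDone(state)
    if state == "error":
        raise RuntimeError(f"Task {task_id} failed")


def wait_for_task(task_id, poll_interval=0.5):
    loop = loopingcall.FixedIntervalLoopingCall(_poll_task, task_id)
    evt = loop.start(poll_interval)
    return evt.wait()


if __name__ == "__main__":
    print(wait_for_task("task-2653625"))
```

The progress percentages seen in the log (0%, 25%, 66%, 99%) are simply successive samples of this poll loop; the `duration_secs` value reported on completion is the elapsed time between the first poll and the terminal state.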
[ 553.794591] env[62974]: DEBUG nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 553.796363] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd34bb2a-b9c8-4882-a5d5-6c469d176f6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.818180] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.883709] env[62974]: DEBUG nova.compute.manager [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Received event network-vif-plugged-227bb3d2-feed-4b53-8666-bc56eb0c3d3e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 553.883784] env[62974]: DEBUG oslo_concurrency.lockutils [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] Acquiring lock "2313468e-820f-4fff-bdeb-5d542c94584d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.884046] env[62974]: DEBUG oslo_concurrency.lockutils [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] Lock "2313468e-820f-4fff-bdeb-5d542c94584d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.884243] env[62974]: DEBUG oslo_concurrency.lockutils [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] Lock "2313468e-820f-4fff-bdeb-5d542c94584d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.884448] env[62974]: DEBUG nova.compute.manager [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] No waiting events found dispatching network-vif-plugged-227bb3d2-feed-4b53-8666-bc56eb0c3d3e {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 553.884570] env[62974]: WARNING nova.compute.manager [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Received unexpected event network-vif-plugged-227bb3d2-feed-4b53-8666-bc56eb0c3d3e for instance with vm_state building and task_state spawning. 
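Annotation: the "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" entries surrounding the event dispatch are emitted by oslo.concurrency's lockutils wrapper (the `inner` frame cited in the log), which times how long a caller waited for a named semaphore and how long it held it. A minimal sketch of that usage pattern follows; the lock names and functions here are hypothetical examples modelled on the names in the log, and the timing lines themselves come from lockutils, not from caller code.

```python
# Sketch of the named-lock pattern behind the Acquiring/acquired/released
# entries. Lock names and functions are illustrative only; lockutils'
# internal wrapper is what logs the waited/held durations.
from oslo_concurrency import lockutils


@lockutils.synchronized("2313468e-820f-4fff-bdeb-5d542c94584d-events")
def pop_instance_event(event_name):
    """Body runs only while the per-instance event lock is held."""
    print(f"dispatching {event_name}")


def refresh_network_cache(instance_uuid):
    # The same facility is also available as a context manager.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        print(f"refreshing network info cache for {instance_uuid}")


if __name__ == "__main__":
    pop_instance_event("network-vif-plugged-227bb3d2-feed-4b53-8666-bc56eb0c3d3e")
    refresh_network_cache("2313468e-820f-4fff-bdeb-5d542c94584d")
```

The waited/held durations in the surrounding entries (e.g. waited 0.000s, held 21.783s for the build lock) are what this wrapper measures around the decorated call, which is why long-running operations such as `_locked_do_build_and_run_instance` report hold times of tens of seconds.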
[ 553.884791] env[62974]: DEBUG nova.compute.manager [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Received event network-changed-227bb3d2-feed-4b53-8666-bc56eb0c3d3e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 553.884891] env[62974]: DEBUG nova.compute.manager [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Refreshing instance network info cache due to event network-changed-227bb3d2-feed-4b53-8666-bc56eb0c3d3e. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 553.885094] env[62974]: DEBUG oslo_concurrency.lockutils [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] Acquiring lock "refresh_cache-2313468e-820f-4fff-bdeb-5d542c94584d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.885686] env[62974]: DEBUG oslo_concurrency.lockutils [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] Acquired lock "refresh_cache-2313468e-820f-4fff-bdeb-5d542c94584d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.885957] env[62974]: DEBUG nova.network.neutron [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Refreshing network info cache for port 227bb3d2-feed-4b53-8666-bc56eb0c3d3e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.975102] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653625, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.040539] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.040807] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquired lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.040880] env[62974]: DEBUG nova.network.neutron [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 554.064083] env[62974]: DEBUG nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 554.099138] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653621, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665995} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.099486] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8f4faa77-4f18-41da-b8d0-efba799d6ec6/8f4faa77-4f18-41da-b8d0-efba799d6ec6.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 554.099796] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 554.100039] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c4536f6-02a1-4f49-9287-f6a3743624e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.108980] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 554.108980] env[62974]: value = "task-2653626" [ 554.108980] env[62974]: _type = "Task" [ 554.108980] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.122136] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.202017] env[62974]: DEBUG nova.compute.utils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 554.209737] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.209912] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 554.243649] env[62974]: INFO nova.compute.manager [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Took 19.99 seconds to build instance. [ 554.286806] env[62974]: DEBUG nova.policy [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d977aa355614e2bbab76080bbe411b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c540bb1bb0e4e86a6e067653ae20895', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 554.333520] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653622, 'name': ReconfigVM_Task, 'duration_secs': 0.72121} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.334298] env[62974]: INFO nova.compute.manager [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Took 17.71 seconds to build instance. [ 554.335202] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e/2174cb7d-3e73-4529-b9f8-735dd6dfcf4e.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 554.336034] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c179fce-38dc-4e5f-8b0a-811c904c58a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.348028] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 554.348028] env[62974]: value = "task-2653627" [ 554.348028] env[62974]: _type = "Task" [ 554.348028] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.362184] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653627, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.473594] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653625, 'name': CreateVM_Task, 'duration_secs': 0.597194} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.474228] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 554.475855] env[62974]: DEBUG oslo_vmware.service [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254f8fb8-d7c1-4196-b08f-6b1fb30f1e42 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.481781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.481949] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.482663] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.482663] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-541c3c8c-ec97-41e2-baa2-423f06d203c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.488184] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 554.488184] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523ba7f5-4d8a-06bd-f7d9-d3bd417a0369" [ 554.488184] env[62974]: _type = "Task" [ 554.488184] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.498414] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523ba7f5-4d8a-06bd-f7d9-d3bd417a0369, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.595958] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.619929] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.358123} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.620276] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 554.621352] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bfe680-bf33-4825-84a9-20912c6a6def {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.652254] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 8f4faa77-4f18-41da-b8d0-efba799d6ec6/8f4faa77-4f18-41da-b8d0-efba799d6ec6.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 554.652254] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fef72eda-27ba-4879-9fe0-20073e8354d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.671169] env[62974]: DEBUG nova.network.neutron [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.679043] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 554.679043] env[62974]: value = "task-2653628" [ 554.679043] env[62974]: _type = "Task" [ 554.679043] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.689306] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653628, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.711412] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 554.750466] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0f1ab84-ee22-42a0-b87c-2839b9fbc56f tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.504s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.836779] env[62974]: DEBUG oslo_concurrency.lockutils [None req-739e99d0-81c3-4807-bf9a-6af8507e83dc tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.228s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.858871] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653627, 'name': Rename_Task, 'duration_secs': 0.284705} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.862454] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 554.863391] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2d03a5d-dfa2-4faa-9e7d-851ba56e1b5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.870305] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 554.870305] env[62974]: value = "task-2653629" [ 554.870305] env[62974]: _type = "Task" [ 554.870305] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.886529] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653629, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.936782] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b132a1d-f998-4068-9ac1-41c855b6f4f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.944954] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04991e2c-b0e5-4b73-b91d-a86f4038e268 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.980353] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df110d3-ad7b-4d6c-99e0-c28e0fa8c8ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.988139] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be25241-6b20-469f-81c7-d164b41a4aef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.010143] env[62974]: DEBUG nova.compute.provider_tree [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.012385] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.013821] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.013821] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.013821] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.013821] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 555.013821] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99f53159-48d2-4377-993f-c0cca7d1c34e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.026011] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 555.026308] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 555.027053] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa3dde7-66f1-41be-8447-3a57e021a8eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.035881] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f67c91c1-615c-4eb1-8c2e-811791ab6d6c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.041487] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 555.041487] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521d0455-a588-60af-c1cf-71d3734ce153" [ 555.041487] env[62974]: _type = "Task" [ 555.041487] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.051038] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521d0455-a588-60af-c1cf-71d3734ce153, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.070518] env[62974]: DEBUG nova.network.neutron [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Updated VIF entry in instance network info cache for port 227bb3d2-feed-4b53-8666-bc56eb0c3d3e. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 555.070867] env[62974]: DEBUG nova.network.neutron [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Updating instance_info_cache with network_info: [{"id": "227bb3d2-feed-4b53-8666-bc56eb0c3d3e", "address": "fa:16:3e:fe:5f:80", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap227bb3d2-fe", "ovs_interfaceid": "227bb3d2-feed-4b53-8666-bc56eb0c3d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.192291] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.254552] env[62974]: DEBUG nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 555.303326] env[62974]: DEBUG nova.network.neutron [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Updating instance_info_cache with network_info: [{"id": "a4073f26-c2d4-4275-aced-337895f21b0c", "address": "fa:16:3e:a8:2c:86", "network": {"id": "b099dd4d-3410-4464-ba41-9f3bf3fd709b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-373447992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d546e5faf230414aa1cb1cb08bcc6bcc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4073f26-c2", "ovs_interfaceid": "a4073f26-c2d4-4275-aced-337895f21b0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.344774] env[62974]: DEBUG nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 555.359097] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Successfully created port: 44fa1a20-5950-4b22-8e9b-213c4323f03f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.382829] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653629, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.419618] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "1933bc47-1717-48c1-b4a2-492a17573de7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.419850] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.513203] env[62974]: DEBUG nova.scheduler.client.report [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 555.562753] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Preparing fetch location {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 555.563069] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Creating directory with path [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 555.563578] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7396f5c-7114-4688-90ef-2deda746bb43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.573460] env[62974]: DEBUG oslo_concurrency.lockutils [req-2ee059bd-bcb3-41c1-b7bb-ad60fd71f5e3 req-e6adfd40-f7a6-4abc-8adb-1ab1c1147642 service nova] Releasing lock "refresh_cache-2313468e-820f-4fff-bdeb-5d542c94584d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.590496] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Created directory with path [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 555.591741] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Fetch image to [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 555.591972] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Downloading image file data 807f8582-499f-47ee-9d5b-755c9f39bc39 to [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk on the data store datastore1 {{(pid=62974) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 555.592849] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4d53df-7bc2-4f6f-b352-06342968d245 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.602391] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3594ed8d-2499-4803-95bf-6daba4b5a0bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.614928] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6cf46e-d3ec-43ec-9222-a8ec64cc4ed8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.654160] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ed344b-d6c2-44b1-bf55-dcb3f3514dfc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.661078] env[62974]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-11933075-dda6-4140-b70a-db9887e54adc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.693086] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.695330] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Downloading image file data 807f8582-499f-47ee-9d5b-755c9f39bc39 to the data store datastore1 {{(pid=62974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 555.724758] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 555.760097] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 555.760097] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.760097] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 555.760727] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.760727] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 555.760727] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 555.760727] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 555.760904] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 555.760976] env[62974]: DEBUG nova.virt.hardware [None 
req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 555.761134] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 555.761350] env[62974]: DEBUG nova.virt.hardware [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 555.766928] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05aacd40-00dd-49d5-b5a2-da2606ca59c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.786522] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Successfully updated port: 8f4af602-edfd-46cd-8684-cff88d420350 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.788807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e095bc-d2c4-4ed0-abf5-0e257a569341 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.800427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.801904] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 555.814909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Releasing lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.815241] env[62974]: DEBUG nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Instance network_info: |[{"id": "a4073f26-c2d4-4275-aced-337895f21b0c", "address": "fa:16:3e:a8:2c:86", "network": {"id": "b099dd4d-3410-4464-ba41-9f3bf3fd709b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-373447992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d546e5faf230414aa1cb1cb08bcc6bcc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4073f26-c2", "ovs_interfaceid": "a4073f26-c2d4-4275-aced-337895f21b0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 555.816826] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:2c:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4073f26-c2d4-4275-aced-337895f21b0c', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.824720] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Creating folder: Project (d546e5faf230414aa1cb1cb08bcc6bcc). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 555.884562] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-461963a9-8183-4d5b-81e5-11cdef43f1cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.904202] env[62974]: DEBUG oslo_vmware.api [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653629, 'name': PowerOnVM_Task, 'duration_secs': 0.860803} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.905402] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 555.908256] env[62974]: INFO nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Took 12.19 seconds to spawn the instance on the hypervisor. [ 555.908256] env[62974]: DEBUG nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 555.908256] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Created folder: Project (d546e5faf230414aa1cb1cb08bcc6bcc) in parent group-v535199. [ 555.908256] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Creating folder: Instances. Parent ref: group-v535218. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 555.908256] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c101594b-da0e-455c-8659-6c6c877542ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.910380] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-390a8b08-9577-4de4-8777-ad18cd65337b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.923236] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Created folder: Instances in parent group-v535218. [ 555.923577] env[62974]: DEBUG oslo.service.loopingcall [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.924705] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.928063] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 555.928633] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2f3b7f0-9f9b-4c35-911b-32dad9e73085 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.950294] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.950294] env[62974]: value = "task-2653632" [ 555.950294] env[62974]: _type = "Task" [ 555.950294] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.965542] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653632, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.020416] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.021082] env[62974]: DEBUG nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 556.024913] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.010s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.192421] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653628, 'name': ReconfigVM_Task, 'duration_secs': 1.218712} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.192610] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 8f4faa77-4f18-41da-b8d0-efba799d6ec6/8f4faa77-4f18-41da-b8d0-efba799d6ec6.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 556.193280] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76ea808d-c54e-4cca-8948-4f527106ae92 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.203632] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 556.203632] env[62974]: value = "task-2653633" [ 556.203632] env[62974]: _type = "Task" [ 556.203632] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.214872] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653633, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.299937] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "refresh_cache-a8446718-f2df-4bad-b5e3-537f19daa823" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.300108] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "refresh_cache-a8446718-f2df-4bad-b5e3-537f19daa823" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.300244] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.447971] env[62974]: INFO nova.compute.manager [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Took 19.61 seconds to build instance. [ 556.477614] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653632, 'name': CreateVM_Task, 'duration_secs': 0.382175} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.478591] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 556.479324] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.479477] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.479789] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.480145] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f38df0b0-288e-4438-a95a-0e7379daee34 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.490717] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 556.490717] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523fcc2d-b3b4-c388-f80b-9c8ef0b597ba" [ 556.490717] env[62974]: _type = "Task" [ 556.490717] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.501847] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523fcc2d-b3b4-c388-f80b-9c8ef0b597ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.528062] env[62974]: DEBUG nova.compute.utils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 556.534995] env[62974]: DEBUG nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 556.539203] env[62974]: DEBUG nova.network.neutron [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 556.562505] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Completed reading data from the image iterator. {{(pid=62974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 556.562720] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 556.719488] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653633, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.720085] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Downloaded image file data 807f8582-499f-47ee-9d5b-755c9f39bc39 to vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk on the data store datastore1 {{(pid=62974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 556.722786] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Caching image {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 556.723041] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Copying Virtual Disk [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk to [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 556.723340] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e404647-fa69-418c-a509-6736baeb475e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.730237] env[62974]: DEBUG nova.policy [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 
tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32c96e2ef0194f61b9a3f83fe73cd3f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca220df51dc0414ea400a56fe5e49e1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 556.738507] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 556.738507] env[62974]: value = "task-2653634" [ 556.738507] env[62974]: _type = "Task" [ 556.738507] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.751290] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.853476] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.958673] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1af09977-4329-4c79-9e11-e234285d2919 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.133s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.005979] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.006346] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.006876] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
557.032750] env[62974]: DEBUG nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 557.072190] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance d8b7a39f-ec73-4a87-9b1e-9428ca72f895 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074588] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 572c2c5f-6a24-4532-9c80-d76017e4aaa1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074588] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074588] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074588] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 8f4faa77-4f18-41da-b8d0-efba799d6ec6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074723] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 2313468e-820f-4fff-bdeb-5d542c94584d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074723] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074723] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a8446718-f2df-4bad-b5e3-537f19daa823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074723] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 2a498460-fced-410b-8b33-3595a2ac6753 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.074837] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a63aa120-1c7b-4abc-93cf-4d138f5cebde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 557.224622] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653633, 'name': Rename_Task, 'duration_secs': 0.839967} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.224622] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 557.224622] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a65cfb3a-1910-4ee7-9dfe-ba4b30202b99 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.229433] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 557.229433] env[62974]: value = "task-2653635" [ 557.229433] env[62974]: _type = "Task" [ 557.229433] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.238349] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653635, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.251452] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653634, 'name': CopyVirtualDisk_Task} progress is 27%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.462891] env[62974]: DEBUG nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 557.485435] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Updating instance_info_cache with network_info: [{"id": "8f4af602-edfd-46cd-8684-cff88d420350", "address": "fa:16:3e:e2:28:64", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f4af602-ed", "ovs_interfaceid": "8f4af602-edfd-46cd-8684-cff88d420350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.578915] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 001557f9-ea50-4e86-9eeb-dd4436791453 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.755312] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653634, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.759357] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653635, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.768220] env[62974]: DEBUG nova.network.neutron [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Successfully created port: 7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 557.989157] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "refresh_cache-a8446718-f2df-4bad-b5e3-537f19daa823" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.989507] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Instance network_info: |[{"id": "8f4af602-edfd-46cd-8684-cff88d420350", "address": "fa:16:3e:e2:28:64", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f4af602-ed", "ovs_interfaceid": "8f4af602-edfd-46cd-8684-cff88d420350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 557.990827] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.991103] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:28:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f4af602-edfd-46cd-8684-cff88d420350', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.999390] env[62974]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Creating folder: Project (7c540bb1bb0e4e86a6e067653ae20895). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.999698] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a046411a-e7e5-46cd-b05b-48c043595449 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.008549] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Created folder: Project (7c540bb1bb0e4e86a6e067653ae20895) in parent group-v535199. [ 558.009039] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Creating folder: Instances. Parent ref: group-v535221. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 558.009039] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e03492fe-c2f0-4d7d-ab3c-403eb6325674 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.018244] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Created folder: Instances in parent group-v535221. [ 558.018521] env[62974]: DEBUG oslo.service.loopingcall [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 558.018689] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 558.018897] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29f12a36-e35b-4907-af57-8dd6e0584371 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.038511] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 558.038511] env[62974]: value = "task-2653638" [ 558.038511] env[62974]: _type = "Task" [ 558.038511] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.047640] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653638, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.048833] env[62974]: DEBUG nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 558.071697] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Acquiring lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.071995] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.072262] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Acquiring lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.072446] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.072606] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.076680] env[62974]: INFO nova.compute.manager [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Terminating instance [ 558.080138] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 558.080394] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.080534] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 558.080712] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.080854] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 558.081010] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 558.081257] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 558.081417] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 558.081579] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 558.081739] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 558.081907] env[62974]: DEBUG nova.virt.hardware [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 558.082595] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cf73422d-7f4b-4bae-9d69-de74d7211243 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.091491] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e5ce7b-9287-4212-a756-6073f51c0a55 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.105981] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ce09bf-48e6-4aac-931b-5bdcf5a50b22 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.243552] env[62974]: DEBUG oslo_vmware.api [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653635, 'name': PowerOnVM_Task, 'duration_secs': 0.744801} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.247876] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 558.248453] env[62974]: INFO nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Took 12.25 seconds to spawn the instance on the hypervisor. [ 558.248947] env[62974]: DEBUG nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 558.249944] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde886b0-f747-401a-8a14-acf412117729 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.259366] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653634, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.22287} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.261174] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Copied Virtual Disk [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk to [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 558.261401] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Deleting the datastore file [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39/tmp-sparse.vmdk {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 558.266109] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-731fc2e8-b1ea-4b4b-b0ec-f5f8b3f07e96 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.275822] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 558.275822] env[62974]: value = "task-2653639" [ 558.275822] env[62974]: _type = "Task" [ 558.275822] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.289592] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653639, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.550423] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653638, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.597307] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 7f0d367d-9d60-414b-990e-56a2b43fd963 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.599793] env[62974]: DEBUG nova.compute.manager [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 558.600000] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 558.600941] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d56734-b072-476f-ab4e-30f271eef41c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.610529] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 558.611139] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6ee6ee9-78bb-4bd7-8f08-fac3a2520c82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.614305] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Successfully updated port: 44fa1a20-5950-4b22-8e9b-213c4323f03f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 558.623114] env[62974]: DEBUG oslo_vmware.api [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Waiting for the task: (returnval){ [ 558.623114] env[62974]: value = "task-2653640" [ 558.623114] env[62974]: _type = "Task" [ 558.623114] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.639211] env[62974]: DEBUG oslo_vmware.api [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Task: {'id': task-2653640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.784669] env[62974]: INFO nova.compute.manager [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Took 21.01 seconds to build instance. [ 558.792918] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037121} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.792918] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 558.792918] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Moving file from [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78/807f8582-499f-47ee-9d5b-755c9f39bc39 to [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39. {{(pid=62974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 558.792918] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-3287a497-200f-483d-a837-fbc5a864fc8b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.801806] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 558.801806] env[62974]: value = "task-2653641" [ 558.801806] env[62974]: _type = "Task" [ 558.801806] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.812987] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653641, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.977354] env[62974]: DEBUG nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Received event network-vif-plugged-a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 558.977354] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Acquiring lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.977354] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.977354] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.977656] env[62974]: DEBUG nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] No waiting events found dispatching network-vif-plugged-a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 558.977716] env[62974]: WARNING nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Received unexpected event network-vif-plugged-a4073f26-c2d4-4275-aced-337895f21b0c for instance with vm_state building and task_state spawning. [ 558.977964] env[62974]: DEBUG nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Received event network-changed-a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 558.978067] env[62974]: DEBUG nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Refreshing instance network info cache due to event network-changed-a4073f26-c2d4-4275-aced-337895f21b0c. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 558.978235] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Acquiring lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.978368] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Acquired lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.979624] env[62974]: DEBUG nova.network.neutron [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Refreshing network info cache for port a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 559.050717] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653638, 'name': CreateVM_Task, 'duration_secs': 0.6128} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.051246] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 559.052015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.052457] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.052762] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 559.053095] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df74d09e-ed99-41be-896d-438c98186482 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.058342] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 559.058342] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523edba5-456e-f516-92e8-9fad5ab4289f" [ 559.058342] env[62974]: _type = "Task" [ 559.058342] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.068399] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523edba5-456e-f516-92e8-9fad5ab4289f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.103019] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 1933bc47-1717-48c1-b4a2-492a17573de7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.103019] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 559.103019] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 559.120186] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "refresh_cache-2a498460-fced-410b-8b33-3595a2ac6753" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.120186] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "refresh_cache-2a498460-fced-410b-8b33-3595a2ac6753" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.120186] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 559.139854] env[62974]: DEBUG oslo_vmware.api [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Task: {'id': task-2653640, 'name': PowerOffVM_Task, 'duration_secs': 0.218559} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.143587] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 559.143587] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 559.144134] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a5ac46e-4173-4c04-ab55-98a7507a529e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.222776] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 559.223174] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 559.223295] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Deleting the datastore file [datastore2] 572c2c5f-6a24-4532-9c80-d76017e4aaa1 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.223577] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0dd85d6-d48a-43af-97f8-58204d4b4b00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.239187] env[62974]: DEBUG oslo_vmware.api [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Waiting for the task: (returnval){ [ 559.239187] env[62974]: value = "task-2653643" [ 559.239187] env[62974]: _type = "Task" [ 559.239187] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.251043] env[62974]: DEBUG oslo_vmware.api [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Task: {'id': task-2653643, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.291946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dfcdc620-5825-49c9-ad0b-7e0d63a24e60 tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.529s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.313188] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653641, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.036618} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.314890] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] File moved {{(pid=62974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 559.315104] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Cleaning up location [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 559.315194] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Deleting the datastore file [datastore1] vmware_temp/8ef113bd-e509-4e0a-93a2-2959e3826b78 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.318466] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e44a2868-aa2c-4d5a-b53d-7ef434dcffe6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.326310] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 559.326310] env[62974]: value = "task-2653644" [ 559.326310] env[62974]: _type = "Task" [ 559.326310] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.335094] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653644, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.368882] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99182a8-8cef-40ad-b9e2-d370d2a2e1ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.378915] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0caf08-308a-4b0c-8a99-7bbec4d23162 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.419169] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931a824e-40c5-419d-8ca2-d76a7611c402 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.427929] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0158596d-c19a-45c0-9b3d-f7f99ae7431b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.443246] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.570904] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523edba5-456e-f516-92e8-9fad5ab4289f, 'name': SearchDatastore_Task, 'duration_secs': 0.010241} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.571596] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.572019] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 559.572445] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.753484] env[62974]: DEBUG oslo_vmware.api [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Task: {'id': task-2653643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150169} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.754490] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 559.754743] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 559.758025] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 559.758025] env[62974]: INFO nova.compute.manager [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Took 1.16 seconds to destroy the instance on the hypervisor. [ 559.758025] env[62974]: DEBUG oslo.service.loopingcall [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.758025] env[62974]: DEBUG nova.compute.manager [-] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 559.758025] env[62974]: DEBUG nova.network.neutron [-] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 559.838035] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025919} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.838323] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 559.839307] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4857692c-6184-4c00-a0ca-0e590bda535b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.846110] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 559.846110] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529ddd1b-5f26-f900-3747-9a21894149b1" [ 559.846110] env[62974]: _type = "Task" [ 559.846110] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.855471] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529ddd1b-5f26-f900-3747-9a21894149b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.920629] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.951654] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 560.288273] env[62974]: DEBUG nova.network.neutron [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Updated VIF entry in instance network info cache for port a4073f26-c2d4-4275-aced-337895f21b0c. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 560.288273] env[62974]: DEBUG nova.network.neutron [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Updating instance_info_cache with network_info: [{"id": "a4073f26-c2d4-4275-aced-337895f21b0c", "address": "fa:16:3e:a8:2c:86", "network": {"id": "b099dd4d-3410-4464-ba41-9f3bf3fd709b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-373447992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d546e5faf230414aa1cb1cb08bcc6bcc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4073f26-c2", "ovs_interfaceid": "a4073f26-c2d4-4275-aced-337895f21b0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.359984] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529ddd1b-5f26-f900-3747-9a21894149b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010645} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.359984] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.360178] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 2313468e-820f-4fff-bdeb-5d542c94584d/2313468e-820f-4fff-bdeb-5d542c94584d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 560.360448] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.360621] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 560.360818] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7b2269d-bc6a-447f-a8ef-285e1e76c4cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.363114] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a03d6966-cf13-4d30-9745-5e79d6812d69 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.371017] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 560.371017] env[62974]: value = "task-2653645" [ 560.371017] env[62974]: _type = "Task" [ 560.371017] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.372756] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 560.372977] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 560.376982] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-525def73-0e84-4fce-81b6-7a80a85a9e31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.380133] env[62974]: DEBUG nova.network.neutron [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Updating instance_info_cache with network_info: [{"id": "44fa1a20-5950-4b22-8e9b-213c4323f03f", "address": "fa:16:3e:4d:45:91", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44fa1a20-59", "ovs_interfaceid": "44fa1a20-5950-4b22-8e9b-213c4323f03f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.390035] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 560.390035] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5283db7d-f549-646d-ac1c-43b1bf590f98" [ 560.390035] env[62974]: _type = "Task" [ 560.390035] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.390035] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653645, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.399175] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5283db7d-f549-646d-ac1c-43b1bf590f98, 'name': SearchDatastore_Task, 'duration_secs': 0.009277} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.401223] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff1a96d6-874e-494e-8884-b84c61717bd0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.408173] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 560.408173] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525bf007-080a-e68c-26ab-9bc54e2030e0" [ 560.408173] env[62974]: _type = "Task" [ 560.408173] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.418154] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525bf007-080a-e68c-26ab-9bc54e2030e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.458695] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 560.458695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.433s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.458695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.862s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.459591] env[62974]: INFO nova.compute.claims [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.757192] env[62974]: DEBUG nova.compute.manager [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 560.793303] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Releasing lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.793303] env[62974]: DEBUG nova.compute.manager 
[req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Received event network-vif-plugged-8f4af602-edfd-46cd-8684-cff88d420350 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 560.793303] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Acquiring lock "a8446718-f2df-4bad-b5e3-537f19daa823-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.793303] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Lock "a8446718-f2df-4bad-b5e3-537f19daa823-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.793303] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Lock "a8446718-f2df-4bad-b5e3-537f19daa823-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.793453] env[62974]: DEBUG nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] No waiting events found dispatching network-vif-plugged-8f4af602-edfd-46cd-8684-cff88d420350 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 560.793453] env[62974]: WARNING nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Received unexpected event network-vif-plugged-8f4af602-edfd-46cd-8684-cff88d420350 for instance with vm_state building and task_state spawning. [ 560.793453] env[62974]: DEBUG nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Received event network-changed-8f4af602-edfd-46cd-8684-cff88d420350 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 560.793453] env[62974]: DEBUG nova.compute.manager [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Refreshing instance network info cache due to event network-changed-8f4af602-edfd-46cd-8684-cff88d420350. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 560.793453] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Acquiring lock "refresh_cache-a8446718-f2df-4bad-b5e3-537f19daa823" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.793591] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Acquired lock "refresh_cache-a8446718-f2df-4bad-b5e3-537f19daa823" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.793591] env[62974]: DEBUG nova.network.neutron [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Refreshing network info cache for port 8f4af602-edfd-46cd-8684-cff88d420350 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 560.798760] env[62974]: DEBUG nova.network.neutron [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Successfully updated port: 7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 560.886630] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "refresh_cache-2a498460-fced-410b-8b33-3595a2ac6753" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.886943] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Instance network_info: |[{"id": "44fa1a20-5950-4b22-8e9b-213c4323f03f", "address": "fa:16:3e:4d:45:91", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44fa1a20-59", "ovs_interfaceid": "44fa1a20-5950-4b22-8e9b-213c4323f03f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 560.889928] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 
tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653645, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453032} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.890382] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:45:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44fa1a20-5950-4b22-8e9b-213c4323f03f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 560.900479] env[62974]: DEBUG oslo.service.loopingcall [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.900683] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 2313468e-820f-4fff-bdeb-5d542c94584d/2313468e-820f-4fff-bdeb-5d542c94584d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 560.900873] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 560.901635] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 560.904028] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c7eae52-59af-4dc0-b382-517464abb35f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.904228] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-291d6539-c717-4ebf-a3f4-42cb855028a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.928529] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 560.928529] env[62974]: value = "task-2653646" [ 560.928529] env[62974]: _type = "Task" [ 560.928529] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.937211] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 560.937211] env[62974]: value = "task-2653647" [ 560.937211] env[62974]: _type = "Task" [ 560.937211] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.938218] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525bf007-080a-e68c-26ab-9bc54e2030e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009618} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.938935] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.939247] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc/7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 560.944800] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.944800] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 560.944800] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9a0ce10-b458-4715-9cf6-2d20801b12d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.952148] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a398e96-5349-49cf-92e7-257a9e2ec436 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.954024] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653646, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.960656] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653647, 'name': CreateVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.962799] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 560.962799] env[62974]: value = "task-2653648" [ 560.962799] env[62974]: _type = "Task" [ 560.962799] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.963462] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 560.963462] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 560.964070] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba0b2a9d-5a7f-4d22-a279-efd41a777ce9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.977379] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 560.977379] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c25bd5-da4b-e7e0-81dc-3ba28f65c0da" [ 560.977379] env[62974]: _type = "Task" [ 560.977379] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.984127] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653648, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.993057] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c25bd5-da4b-e7e0-81dc-3ba28f65c0da, 'name': SearchDatastore_Task, 'duration_secs': 0.010034} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.993732] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb4ca145-25e4-48f3-bca9-cd052f00d724 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.000096] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 561.000096] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52080a03-9a20-ccf5-d8fe-524b7fd17644" [ 561.000096] env[62974]: _type = "Task" [ 561.000096] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.008499] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "f9adcd7e-58a0-433c-8602-cca814b84aaa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.008773] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.017549] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52080a03-9a20-ccf5-d8fe-524b7fd17644, 'name': SearchDatastore_Task, 'duration_secs': 0.009616} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.017549] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.017549] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a8446718-f2df-4bad-b5e3-537f19daa823/a8446718-f2df-4bad-b5e3-537f19daa823.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 561.017549] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8af20a0-63a8-4c64-992e-02b131230c7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.025621] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 561.025621] env[62974]: value = "task-2653649" [ 561.025621] env[62974]: _type = "Task" [ 561.025621] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.034305] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653649, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.287591] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.308340] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.308340] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.308340] env[62974]: DEBUG nova.network.neutron [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 561.448948] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653646, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072634} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.448948] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 561.449129] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d4a187-6422-4164-82c5-562fcdeb443b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.465529] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653647, 'name': CreateVM_Task, 'duration_secs': 0.399494} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.478282] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 561.511958] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.512206] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.512505] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 561.513812] env[62974]: DEBUG nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 561.531630] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 2313468e-820f-4fff-bdeb-5d542c94584d/2313468e-820f-4fff-bdeb-5d542c94584d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 561.536035] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-808b23cc-a52e-4fd9-877c-addae044fca6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.540778] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8e553b2-8ace-4d7d-ab25-5396738e40df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.564568] env[62974]: DEBUG nova.network.neutron [-] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.565695] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653648, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487342} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.569942] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc/7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 561.570364] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 561.571103] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97066961-d908-4971-887c-ad016ed6fa10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.579555] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 561.579555] env[62974]: value = "task-2653650" [ 561.579555] env[62974]: _type = "Task" [ 561.579555] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.579818] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653649, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.580035] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 561.580035] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e382ae-9c98-7f00-dae9-288ca32bd456" [ 561.580035] env[62974]: _type = "Task" [ 561.580035] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.594415] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 561.594415] env[62974]: value = "task-2653651" [ 561.594415] env[62974]: _type = "Task" [ 561.594415] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.604503] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e382ae-9c98-7f00-dae9-288ca32bd456, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.604774] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653650, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.614878] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653651, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.824075] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fd16af-44de-4053-9ed9-aa4b874d84a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.832145] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486664cd-8752-4ea2-9d98-ef428fc43f13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.863599] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a43eed-22d4-40da-a230-00d6166c93d4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.866932] env[62974]: DEBUG nova.network.neutron [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 561.876970] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc0eeee-7b39-4474-b438-53d59d59e57f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.889859] env[62974]: DEBUG nova.compute.provider_tree [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.974029] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.974297] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.974523] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.974734] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.974919] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.977481] env[62974]: INFO nova.compute.manager [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Terminating instance [ 562.053076] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef 
tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741169} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.053394] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a8446718-f2df-4bad-b5e3-537f19daa823/a8446718-f2df-4bad-b5e3-537f19daa823.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 562.053657] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 562.053864] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b5edbe0-e814-4d2b-9d99-7d63ab9850a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.061472] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 562.061472] env[62974]: value = "task-2653652" [ 562.061472] env[62974]: _type = "Task" [ 562.061472] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.064171] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.069295] env[62974]: INFO nova.compute.manager [-] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Took 2.31 seconds to deallocate network for instance. [ 562.088584] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.104393] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653650, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.109028] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e382ae-9c98-7f00-dae9-288ca32bd456, 'name': SearchDatastore_Task, 'duration_secs': 0.059368} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.109343] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.109689] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 562.109951] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.110161] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.110350] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 562.113795] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f36d6294-56bd-4815-93ef-c7e5ab1c78cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.117115] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653651, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102589} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.117115] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 562.117269] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4a492c-8741-43ee-85cc-74106d126735 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.123278] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 562.126597] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 562.133580] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2720409a-f04a-472a-9322-6cb3b53c3856 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.144721] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc/7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 562.145749] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39e25845-f91d-43d8-97f1-d7d194279e2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.166890] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 562.166890] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520f1839-a637-7168-8c4e-d3defef1488b" [ 562.166890] env[62974]: _type = "Task" [ 562.166890] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.168326] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 562.168326] env[62974]: value = "task-2653653" [ 562.168326] env[62974]: _type = "Task" [ 562.168326] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.180166] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520f1839-a637-7168-8c4e-d3defef1488b, 'name': SearchDatastore_Task, 'duration_secs': 0.010111} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.184499] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653653, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.184499] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-116603a4-1bad-49a1-b876-9e8424a92a02 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.192382] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 562.192382] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5280a368-993a-cbe6-b347-85ee8b3ddd21" [ 562.192382] env[62974]: _type = "Task" [ 562.192382] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.200302] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5280a368-993a-cbe6-b347-85ee8b3ddd21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.346324] env[62974]: DEBUG nova.network.neutron [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Updated VIF entry in instance network info cache for port 8f4af602-edfd-46cd-8684-cff88d420350. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 562.346802] env[62974]: DEBUG nova.network.neutron [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Updating instance_info_cache with network_info: [{"id": "8f4af602-edfd-46cd-8684-cff88d420350", "address": "fa:16:3e:e2:28:64", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f4af602-ed", "ovs_interfaceid": "8f4af602-edfd-46cd-8684-cff88d420350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.393920] env[62974]: DEBUG nova.scheduler.client.report [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 562.459343] env[62974]: DEBUG nova.network.neutron [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [{"id": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "address": "fa:16:3e:cb:fb:0a", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", 
"external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dbab348-e4", "ovs_interfaceid": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.482553] env[62974]: DEBUG nova.compute.manager [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 562.482729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 562.484109] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b6703e-9616-400b-83e8-c550984862de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.494555] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 562.494813] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c0376c4-e89e-4510-811f-5f7340dea8e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.501955] env[62974]: DEBUG oslo_vmware.api [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 562.501955] env[62974]: value = "task-2653654" [ 562.501955] env[62974]: _type = "Task" [ 562.501955] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.516057] env[62974]: DEBUG oslo_vmware.api [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653654, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.576476] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061494} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.576903] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 562.580208] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d47c908-bef9-4256-a7d4-a8a0d9b174a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.601146] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.611024] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] a8446718-f2df-4bad-b5e3-537f19daa823/a8446718-f2df-4bad-b5e3-537f19daa823.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 562.616481] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef56e0a4-f407-44bd-b583-d03f800d6feb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.640417] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.642472] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 562.642472] env[62974]: value = "task-2653655" [ 562.642472] env[62974]: _type = "Task" [ 562.642472] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.651723] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653655, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.682069] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653653, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.706114] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5280a368-993a-cbe6-b347-85ee8b3ddd21, 'name': SearchDatastore_Task, 'duration_secs': 0.008783} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.706114] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.706114] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 2a498460-fced-410b-8b33-3595a2ac6753/2a498460-fced-410b-8b33-3595a2ac6753.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 562.706114] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e260e3e-efdb-4e9d-8eea-614910c086ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.713203] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 562.713203] env[62974]: value = "task-2653656" [ 562.713203] env[62974]: _type = "Task" [ 562.713203] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.721700] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653656, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.851046] env[62974]: DEBUG oslo_concurrency.lockutils [req-23e9b476-0252-4d4e-bd8d-699f7f8eeeac req-afa27b9c-36e5-473f-b61d-0d99d28e8027 service nova] Releasing lock "refresh_cache-a8446718-f2df-4bad-b5e3-537f19daa823" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.902578] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.903095] env[62974]: DEBUG nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 562.906351] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.106s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.909254] env[62974]: INFO nova.compute.claims [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.963977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Releasing lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.964354] env[62974]: DEBUG nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Instance network_info: |[{"id": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "address": "fa:16:3e:cb:fb:0a", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dbab348-e4", "ovs_interfaceid": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 562.964802] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:fb:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88eedc4b-66dc-4845-9f95-858d6db12a7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dbab348-e4dd-46db-ae81-292fbfcd16dc', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 562.975421] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Creating folder: Project (ca220df51dc0414ea400a56fe5e49e1c). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 562.976572] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a757656-3778-42f9-b5f9-1085dbace62e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.990188] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Created folder: Project (ca220df51dc0414ea400a56fe5e49e1c) in parent group-v535199. [ 562.990783] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Creating folder: Instances. Parent ref: group-v535225. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 562.991143] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef8ea634-5269-4c52-a863-14afd93069ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.001828] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Created folder: Instances in parent group-v535225. [ 563.002087] env[62974]: DEBUG oslo.service.loopingcall [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.002279] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 563.002563] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69d6f61f-0625-45c7-a4a8-edbeafdee35d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.029449] env[62974]: DEBUG oslo_vmware.api [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653654, 'name': PowerOffVM_Task, 'duration_secs': 0.424386} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.031542] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 563.031542] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 563.031542] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 563.031542] env[62974]: value = "task-2653659" [ 563.031542] env[62974]: _type = "Task" [ 563.031542] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.031542] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ac090c5-ac57-4178-a3ea-39aef9b2d797 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.046101] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653659, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.112027] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 563.112027] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 563.112027] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Deleting the datastore file [datastore2] 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 563.117017] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66663853-6489-4f00-a752-a230ff17235c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.119581] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653650, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.134169] env[62974]: DEBUG oslo_vmware.api [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for the task: (returnval){ [ 563.134169] env[62974]: value = "task-2653661" [ 563.134169] env[62974]: _type = "Task" [ 563.134169] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.144583] env[62974]: DEBUG oslo_vmware.api [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.161981] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653655, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.183098] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653653, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.224461] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653656, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.416883] env[62974]: DEBUG nova.compute.utils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.421596] env[62974]: DEBUG nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 563.422063] env[62974]: DEBUG nova.network.neutron [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 563.468396] env[62974]: DEBUG nova.policy [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbc4e483455d48078a473ba08a4d8886', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'adf9a2a44db94217bdd7652ef27b5737', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 563.544715] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653659, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.613230] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653650, 'name': ReconfigVM_Task, 'duration_secs': 1.556884} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.613230] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 2313468e-820f-4fff-bdeb-5d542c94584d/2313468e-820f-4fff-bdeb-5d542c94584d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.614216] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83b1d54e-b362-4093-8293-dae3658eb149 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.623201] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 563.623201] env[62974]: value = "task-2653662" [ 563.623201] env[62974]: _type = "Task" [ 563.623201] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.630021] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653662, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.644016] env[62974]: DEBUG oslo_vmware.api [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Task: {'id': task-2653661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.341389} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.644741] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 563.645074] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 563.645394] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 563.646251] env[62974]: INFO nova.compute.manager [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 563.646648] env[62974]: DEBUG oslo.service.loopingcall [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.649973] env[62974]: DEBUG nova.compute.manager [-] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 563.650255] env[62974]: DEBUG nova.network.neutron [-] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 563.658130] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653655, 'name': ReconfigVM_Task, 'duration_secs': 0.659158} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.658260] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Reconfigured VM instance instance-00000008 to attach disk [datastore1] a8446718-f2df-4bad-b5e3-537f19daa823/a8446718-f2df-4bad-b5e3-537f19daa823.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.659041] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86d3efec-b167-45f6-aa87-0868f9a376c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.665947] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 563.665947] env[62974]: value = "task-2653663" [ 563.665947] env[62974]: _type = "Task" [ 563.665947] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.679097] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653663, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.684206] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653653, 'name': ReconfigVM_Task, 'duration_secs': 1.106134} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.684599] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc/7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.685735] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f8ee059-7706-4c13-b147-dc94539993df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.692655] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 563.692655] env[62974]: value = "task-2653664" [ 563.692655] env[62974]: _type = "Task" [ 563.692655] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.704417] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653664, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.728409] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653656, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689681} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.728683] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 2a498460-fced-410b-8b33-3595a2ac6753/2a498460-fced-410b-8b33-3595a2ac6753.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 563.729418] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 563.729418] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8eddd68-7f95-4d03-a481-79fec19f41a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.737038] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 563.737038] env[62974]: value = "task-2653665" [ 563.737038] env[62974]: _type = "Task" [ 563.737038] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.752987] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653665, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.929024] env[62974]: DEBUG nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 564.049744] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653659, 'name': CreateVM_Task, 'duration_secs': 0.771308} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.050250] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 564.051168] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.051470] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.052040] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 564.052942] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e93d776b-803d-42e8-a0c0-52cdd614cc40 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.057844] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 564.057844] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525fc428-a392-79fa-12eb-e0865c08eb9f" [ 564.057844] env[62974]: _type = "Task" [ 564.057844] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.074959] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525fc428-a392-79fa-12eb-e0865c08eb9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.133126] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653662, 'name': Rename_Task, 'duration_secs': 0.221799} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.137479] env[62974]: DEBUG nova.network.neutron [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Successfully created port: c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.139300] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 564.139780] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d4a3778-03a3-4632-966a-dfeda14fd412 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.147054] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 564.147054] env[62974]: value = "task-2653666" [ 564.147054] env[62974]: _type = "Task" [ 564.147054] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.156519] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653666, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.177843] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653663, 'name': Rename_Task, 'duration_secs': 0.198144} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.177843] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 564.177843] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b83b3754-deb2-4aa3-9aa2-8696fe547aa1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.185893] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 564.185893] env[62974]: value = "task-2653667" [ 564.185893] env[62974]: _type = "Task" [ 564.185893] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.202855] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653667, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.209277] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653664, 'name': Rename_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.254723] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093483} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.254723] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 564.255677] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbdae8a-4658-467c-9cac-d21c5eb2d694 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.283395] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 2a498460-fced-410b-8b33-3595a2ac6753/2a498460-fced-410b-8b33-3595a2ac6753.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 564.284861] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df27d782-fd4e-41a6-8231-b5cf2c49da46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.301657] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7491a533-c68a-474d-9e7a-4194d2d09ce9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.311110] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3d5fa2-b4e0-4998-a47c-c6bc4fcacbd4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.314386] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 564.314386] env[62974]: value = "task-2653668" [ 564.314386] env[62974]: _type = "Task" [ 564.314386] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.346031] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6927d1ba-6214-47c0-b0c5-ae2fb469055b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.352578] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653668, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.358114] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09899632-0781-407a-8e35-9f9b2312788c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.374245] env[62974]: DEBUG nova.compute.provider_tree [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.549175] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Received event network-vif-plugged-44fa1a20-5950-4b22-8e9b-213c4323f03f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 564.552583] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Acquiring lock "2a498460-fced-410b-8b33-3595a2ac6753-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.552583] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Lock "2a498460-fced-410b-8b33-3595a2ac6753-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.552583] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Lock "2a498460-fced-410b-8b33-3595a2ac6753-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.552583] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] No waiting events found dispatching network-vif-plugged-44fa1a20-5950-4b22-8e9b-213c4323f03f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 564.552583] env[62974]: WARNING nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Received 
unexpected event network-vif-plugged-44fa1a20-5950-4b22-8e9b-213c4323f03f for instance with vm_state building and task_state spawning. [ 564.552997] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Received event network-changed-44fa1a20-5950-4b22-8e9b-213c4323f03f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 564.552997] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Refreshing instance network info cache due to event network-changed-44fa1a20-5950-4b22-8e9b-213c4323f03f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 564.552997] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Acquiring lock "refresh_cache-2a498460-fced-410b-8b33-3595a2ac6753" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.552997] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Acquired lock "refresh_cache-2a498460-fced-410b-8b33-3595a2ac6753" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.552997] env[62974]: DEBUG nova.network.neutron [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Refreshing network info cache for port 44fa1a20-5950-4b22-8e9b-213c4323f03f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 564.570769] env[62974]: DEBUG nova.compute.manager [req-226110e0-ade9-43d4-b296-3b53d95df810 req-54bce419-e354-451b-b64b-cb5905e9eeeb service nova] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Received event network-vif-deleted-4f09f936-5667-4bf0-8972-a2531e87aaee {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 564.580052] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525fc428-a392-79fa-12eb-e0865c08eb9f, 'name': SearchDatastore_Task, 'duration_secs': 0.013651} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.580052] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.580052] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 564.580052] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.580249] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.580249] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 564.580249] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29b40e21-743c-4a43-b255-2732064c25d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.589885] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 564.590130] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 564.590863] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6634e13e-6767-48a7-8f3d-b6eec44b8586 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.597354] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 564.597354] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526aac97-75d0-5d12-aac1-05b8b9ed8581" [ 564.597354] env[62974]: _type = "Task" [ 564.597354] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.605809] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526aac97-75d0-5d12-aac1-05b8b9ed8581, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.656874] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653666, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.697959] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653667, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.710626] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653664, 'name': Rename_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.829651] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653668, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.880024] env[62974]: DEBUG nova.scheduler.client.report [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 564.940771] env[62974]: DEBUG nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 564.974961] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 564.975416] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.975416] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 564.975586] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.976068] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 
tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 564.976068] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 564.976068] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 564.976216] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 564.976363] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 564.976514] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 564.976686] env[62974]: DEBUG nova.virt.hardware [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 564.977691] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb31ed7-3d10-4a70-98c4-dabb0a8c12ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.986679] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e825e50-ff63-4673-8d07-858df8f1d9d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.113991] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526aac97-75d0-5d12-aac1-05b8b9ed8581, 'name': SearchDatastore_Task, 'duration_secs': 0.010001} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.113991] env[62974]: DEBUG nova.network.neutron [-] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.119744] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a934af16-1aa1-4a64-8cd5-3c21be2044f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.128129] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 565.128129] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521a07d1-3152-c120-4827-d32d06761a4d" [ 565.128129] env[62974]: _type = "Task" [ 565.128129] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.142399] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521a07d1-3152-c120-4827-d32d06761a4d, 'name': SearchDatastore_Task, 'duration_secs': 0.009284} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.142399] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.142399] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a63aa120-1c7b-4abc-93cf-4d138f5cebde/a63aa120-1c7b-4abc-93cf-4d138f5cebde.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 565.142399] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5a171ab-1d56-4230-ac63-52650fd7ce8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.151096] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 565.151096] env[62974]: value = "task-2653669" [ 565.151096] env[62974]: _type = "Task" [ 565.151096] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.166112] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653666, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.166331] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.200963] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653667, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.211534] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653664, 'name': Rename_Task, 'duration_secs': 1.205774} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.212020] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 565.212203] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66ff8149-3619-42c5-8fa9-abc646361e53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.219857] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 565.219857] env[62974]: value = "task-2653670" [ 565.219857] env[62974]: _type = "Task" [ 565.219857] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.233676] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.335321] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653668, 'name': ReconfigVM_Task, 'duration_secs': 0.935668} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.338436] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 2a498460-fced-410b-8b33-3595a2ac6753/2a498460-fced-410b-8b33-3595a2ac6753.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 565.339845] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fdf6998-9674-4be2-8c78-1da4e8d99dd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.352305] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 565.352305] env[62974]: value = "task-2653671" [ 565.352305] env[62974]: _type = "Task" [ 565.352305] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.370016] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653671, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.383691] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.384259] env[62974]: DEBUG nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 565.388252] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.463s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.391042] env[62974]: INFO nova.compute.claims [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.611549] env[62974]: DEBUG nova.network.neutron [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Updated VIF entry in instance network info cache for port 44fa1a20-5950-4b22-8e9b-213c4323f03f. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 565.612114] env[62974]: DEBUG nova.network.neutron [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Updating instance_info_cache with network_info: [{"id": "44fa1a20-5950-4b22-8e9b-213c4323f03f", "address": "fa:16:3e:4d:45:91", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44fa1a20-59", "ovs_interfaceid": "44fa1a20-5950-4b22-8e9b-213c4323f03f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.623569] env[62974]: INFO nova.compute.manager [-] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Took 1.97 seconds to deallocate network for instance. [ 565.679069] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653669, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.679069] env[62974]: DEBUG oslo_vmware.api [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653666, 'name': PowerOnVM_Task, 'duration_secs': 1.165808} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.679653] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 565.679653] env[62974]: INFO nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Took 17.15 seconds to spawn the instance on the hypervisor. [ 565.679770] env[62974]: DEBUG nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 565.680699] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e111bb2a-4c3b-4061-ae7e-9d5833fe2f81 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.699046] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653667, 'name': PowerOnVM_Task, 'duration_secs': 1.13044} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.699046] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 565.699046] env[62974]: INFO nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Took 12.38 seconds to spawn the instance on the hypervisor. 
[ 565.699365] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 565.701022] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cff05f3-24f5-40cb-b9b9-32f6efb0078b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.733555] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653670, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.863888] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653671, 'name': Rename_Task, 'duration_secs': 0.296316} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.864210] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 565.864455] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77676081-55e3-44b5-a536-f6980a4986a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.872789] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 565.872789] env[62974]: value = "task-2653672" [ 565.872789] env[62974]: _type = "Task" [ 565.872789] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.890480] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.894960] env[62974]: DEBUG nova.compute.utils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 565.899025] env[62974]: DEBUG nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 565.899127] env[62974]: DEBUG nova.network.neutron [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 565.974690] env[62974]: DEBUG nova.policy [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49d8e3a243d346e8969ba6f325e7787e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9087d01b1ad748e0a66474953dfe7034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 566.118861] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Releasing lock "refresh_cache-2a498460-fced-410b-8b33-3595a2ac6753" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.119125] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received event network-vif-plugged-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 566.119175] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Acquiring lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.123031] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.123031] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.123031] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] No waiting events found dispatching network-vif-plugged-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 566.123031] env[62974]: WARNING nova.compute.manager 
[req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received unexpected event network-vif-plugged-7dbab348-e4dd-46db-ae81-292fbfcd16dc for instance with vm_state building and task_state spawning. [ 566.123031] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 566.123390] env[62974]: DEBUG nova.compute.manager [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing instance network info cache due to event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 566.123390] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Acquiring lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.123390] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Acquired lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.123390] env[62974]: DEBUG nova.network.neutron [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 566.135981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.160659] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537606} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.161081] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a63aa120-1c7b-4abc-93cf-4d138f5cebde/a63aa120-1c7b-4abc-93cf-4d138f5cebde.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 566.162034] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 566.162384] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19e1191c-1794-48a4-bc75-589d985e0c0c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.179894] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 566.179894] env[62974]: value = "task-2653673" [ 566.179894] env[62974]: _type = "Task" [ 566.179894] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.189607] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653673, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.209223] env[62974]: INFO nova.compute.manager [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Took 25.61 seconds to build instance. [ 566.226851] env[62974]: INFO nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Took 20.70 seconds to build instance. [ 566.234047] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653670, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.360417] env[62974]: DEBUG nova.network.neutron [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Successfully updated port: c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 566.386915] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653672, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.400311] env[62974]: DEBUG nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 566.593360] env[62974]: DEBUG nova.network.neutron [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Successfully created port: 7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.701267] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653673, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167697} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.701614] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 566.703246] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8010428a-f1c5-4bec-8739-b6675ad14077 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.720962] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d567c43e-0009-4c66-8d7d-a7891480ddba tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "2313468e-820f-4fff-bdeb-5d542c94584d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.165s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.730643] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] a63aa120-1c7b-4abc-93cf-4d138f5cebde/a63aa120-1c7b-4abc-93cf-4d138f5cebde.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 566.733918] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df85e138-835d-4473-928a-d0de9f45e148 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.759152] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "a8446718-f2df-4bad-b5e3-537f19daa823" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.265s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.770131] env[62974]: DEBUG oslo_vmware.api [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2653670, 'name': PowerOnVM_Task, 'duration_secs': 1.017892} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.770737] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 566.770737] env[62974]: value = "task-2653674" [ 566.770737] env[62974]: _type = "Task" [ 566.770737] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.772449] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 566.772449] env[62974]: INFO nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Took 15.80 seconds to spawn the instance on the hypervisor. [ 566.772668] env[62974]: DEBUG nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 566.773685] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f52d02b-8d8a-4f98-86ad-b306a7d77e3a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.778561] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a885da4d-ad9c-4288-b2dc-a3363537062b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.795410] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec792f36-9b6c-4023-b085-084ac9588b40 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.799336] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653674, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.836355] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0220277-b3ee-4b2b-9fec-99a1ad245d94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.845497] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623aeac6-3fa6-4e34-a3cd-13f37d9fbdc6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.871307] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.871307] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquired lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.871393] env[62974]: DEBUG nova.network.neutron [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 566.873228] env[62974]: DEBUG nova.compute.provider_tree [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.892857] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653672, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.100553] env[62974]: DEBUG nova.network.neutron [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updated VIF entry in instance network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 567.101408] env[62974]: DEBUG nova.network.neutron [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [{"id": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "address": "fa:16:3e:cb:fb:0a", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dbab348-e4", "ovs_interfaceid": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.292147] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.319056] env[62974]: INFO nova.compute.manager [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Took 24.89 seconds to build instance. [ 567.377048] env[62974]: DEBUG nova.scheduler.client.report [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 567.394862] env[62974]: DEBUG oslo_vmware.api [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653672, 'name': PowerOnVM_Task, 'duration_secs': 1.248273} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.395226] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 567.395369] env[62974]: INFO nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Took 11.67 seconds to spawn the instance on the hypervisor. [ 567.395516] env[62974]: DEBUG nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 567.396457] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b70e26b-e716-4178-9e3d-105cdfd49866 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.405930] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "586a3541-060f-4859-8507-17faa637b17e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.406300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "586a3541-060f-4859-8507-17faa637b17e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.418510] env[62974]: DEBUG nova.network.neutron [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.428907] env[62974]: DEBUG nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 567.461044] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 567.461738] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.461738] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 567.461967] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.462055] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 567.462263] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 567.462540] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 567.462747] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 567.462940] env[62974]: DEBUG 
nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 567.463173] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 567.463475] env[62974]: DEBUG nova.virt.hardware [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 567.464431] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2270b2b-7779-40e5-ad5a-4967481055b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.476850] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047c3910-9055-4085-9da1-6f8c862c04a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.589768] env[62974]: DEBUG nova.network.neutron [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updating instance_info_cache with network_info: [{"id": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "address": "fa:16:3e:be:01:c4", "network": {"id": "f0afd336-13eb-49da-8643-c6a4c51451d7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-622006440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf9a2a44db94217bdd7652ef27b5737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1dbf093-9a", "ovs_interfaceid": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.607358] env[62974]: DEBUG oslo_concurrency.lockutils [req-e94d05ba-6e09-4907-beca-c6162103dc8b req-806354f7-f633-4bf2-9f69-532c94e48238 service nova] Releasing lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.786803] env[62974]: DEBUG oslo_vmware.api [None 
req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653674, 'name': ReconfigVM_Task, 'duration_secs': 0.742606} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.787178] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Reconfigured VM instance instance-0000000a to attach disk [datastore1] a63aa120-1c7b-4abc-93cf-4d138f5cebde/a63aa120-1c7b-4abc-93cf-4d138f5cebde.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 567.789511] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cefada65-256e-4c2c-9dae-612eaac17788 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.794842] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 567.794842] env[62974]: value = "task-2653675" [ 567.794842] env[62974]: _type = "Task" [ 567.794842] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.805300] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653675, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.826515] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cf5adcf6-aeb4-4637-afcb-6652a46a9a4b tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.411s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.828262] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "ecde0e49-c344-4003-b858-8312c1ac344f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.828650] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "ecde0e49-c344-4003-b858-8312c1ac344f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.885540] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.886057] env[62974]: DEBUG nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 567.891711] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.898s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.891711] env[62974]: INFO nova.compute.claims [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.913960] env[62974]: DEBUG nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 567.925072] env[62974]: INFO nova.compute.manager [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Took 22.37 seconds to build instance. [ 568.095034] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Releasing lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.095034] env[62974]: DEBUG nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Instance network_info: |[{"id": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "address": "fa:16:3e:be:01:c4", "network": {"id": "f0afd336-13eb-49da-8643-c6a4c51451d7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-622006440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf9a2a44db94217bdd7652ef27b5737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1dbf093-9a", "ovs_interfaceid": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 568.095690] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:01:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f49a7d-c6e5-404f-b71a-91d8c070cd18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1dbf093-9abb-4c1d-a4bc-163058074d4f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.106628] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Creating folder: Project (adf9a2a44db94217bdd7652ef27b5737). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.106945] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a128da4-0661-4f7a-912e-cbb18c848c0b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.118614] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Created folder: Project (adf9a2a44db94217bdd7652ef27b5737) in parent group-v535199. [ 568.118910] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Creating folder: Instances. Parent ref: group-v535228. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.119214] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6030425-f04a-4308-b84b-8973d8909910 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.132145] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Created folder: Instances in parent group-v535228. [ 568.132145] env[62974]: DEBUG oslo.service.loopingcall [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.132145] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 568.132145] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37577bb6-4578-4f7f-9f58-6aea3e180c57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.154187] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.154187] env[62974]: value = "task-2653678" [ 568.154187] env[62974]: _type = "Task" [ 568.154187] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.164712] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653678, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.307029] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653675, 'name': Rename_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.332438] env[62974]: DEBUG nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 568.395325] env[62974]: DEBUG nova.compute.utils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 568.403085] env[62974]: DEBUG nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 568.403239] env[62974]: DEBUG nova.network.neutron [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 568.428379] env[62974]: DEBUG oslo_concurrency.lockutils [None req-94918d74-c0cd-46ee-a242-34d88c279fef tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2a498460-fced-410b-8b33-3595a2ac6753" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.893s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.450990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.462889] env[62974]: DEBUG nova.policy [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7699b20ac86649a0ac3c3466cf7fd840', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c03512b9d335431cbc77d3d599ce3de5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 568.466527] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "6dc914e9-bce5-4a19-a919-ae94981ea800" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.466527] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.671165] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653678, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.805141] env[62974]: DEBUG nova.network.neutron [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Successfully created port: ebd2d4e4-f1df-4022-a6b8-66224fadfb3d {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 568.813013] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653675, 'name': Rename_Task, 'duration_secs': 0.988216} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.813332] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 568.813585] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b32004d-4b5f-4adc-8035-548ec4f420c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.821482] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 568.821482] env[62974]: value = "task-2653679" [ 568.821482] env[62974]: _type = "Task" [ 568.821482] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.831073] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653679, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.857629] env[62974]: DEBUG nova.network.neutron [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Successfully updated port: 7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 568.874263] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.902562] env[62974]: DEBUG nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 568.969357] env[62974]: DEBUG nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 569.171045] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653678, 'name': CreateVM_Task, 'duration_secs': 0.567734} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.173799] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 569.174736] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.174873] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.175267] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.175971] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c814ec67-7357-4028-9fc5-c09906e9c604 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.181680] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 569.181680] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ec1e65-bfe2-0030-2dab-9e622ff6c306" [ 569.181680] env[62974]: _type = "Task" [ 569.181680] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.192130] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ec1e65-bfe2-0030-2dab-9e622ff6c306, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.333734] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653679, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.341783] env[62974]: DEBUG nova.compute.manager [req-6b15947d-f5b4-410f-bd27-0e74586178d7 req-df195269-cf26-437e-8252-5523d2825c98 service nova] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Received event network-vif-deleted-cf420179-c3b4-4a7d-bf15-a2bcdac8faae {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 569.350919] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644dbe9b-7566-412f-8465-a8792ebdb75c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.358840] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b73e74-4385-42b7-8b7a-3611d061a0e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.363391] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.363545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.363681] env[62974]: DEBUG nova.network.neutron [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 569.365624] env[62974]: DEBUG nova.compute.manager [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Received event network-vif-plugged-c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 569.365882] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] Acquiring lock "001557f9-ea50-4e86-9eeb-dd4436791453-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.366153] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] Lock "001557f9-ea50-4e86-9eeb-dd4436791453-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.366341] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] Lock "001557f9-ea50-4e86-9eeb-dd4436791453-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.366506] env[62974]: DEBUG nova.compute.manager [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] No waiting events found dispatching network-vif-plugged-c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 569.366670] env[62974]: WARNING nova.compute.manager [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Received unexpected event network-vif-plugged-c1dbf093-9abb-4c1d-a4bc-163058074d4f for instance with vm_state building and task_state spawning. [ 569.366842] env[62974]: DEBUG nova.compute.manager [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Received event network-changed-c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 569.366993] env[62974]: DEBUG nova.compute.manager [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Refreshing instance network info cache due to event network-changed-c1dbf093-9abb-4c1d-a4bc-163058074d4f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 569.367214] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] Acquiring lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.367351] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] Acquired lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.367503] env[62974]: DEBUG nova.network.neutron [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Refreshing network info cache for port c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 569.402317] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2fa98a-0d0b-46f7-a711-81d74a8cf586 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.419622] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5340da-bae1-4384-b0e9-3ee19afd5fb9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.433468] env[62974]: DEBUG nova.compute.provider_tree [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.458338] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "b3827c67-9075-4a53-9f9e-8651e3f4b211" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.458482] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.498569] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.555235] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "1873faa1-dec2-4d17-a71a-c53fea50c09b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.555508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.640307] env[62974]: DEBUG nova.compute.manager [None req-06d3b49e-c3a1-4784-8121-3fe3431ad751 tempest-ServerDiagnosticsTest-1872259400 tempest-ServerDiagnosticsTest-1872259400-project-admin] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 569.641712] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3164e8cd-16ea-453c-96c2-133eb2c3d880 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.649590] env[62974]: INFO nova.compute.manager [None req-06d3b49e-c3a1-4784-8121-3fe3431ad751 tempest-ServerDiagnosticsTest-1872259400 tempest-ServerDiagnosticsTest-1872259400-project-admin] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Retrieving diagnostics [ 569.650618] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baec34ca-13a7-43e2-b9a1-fd170a6f8b9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.693684] env[62974]: DEBUG oslo_vmware.api [None 
req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ec1e65-bfe2-0030-2dab-9e622ff6c306, 'name': SearchDatastore_Task, 'duration_secs': 0.010945} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.693983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.694228] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.694459] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.694602] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.694776] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 569.695047] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9725a7e9-ced4-41ac-b97c-911464587445 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.703321] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 569.703509] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 569.704214] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a2d5713-10b9-4f43-b34f-e5553dcd4c1e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.711151] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 569.711151] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e08f6d-536d-7e53-79f1-5baa366b34d2" [ 569.711151] env[62974]: _type = "Task" [ 569.711151] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.719250] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e08f6d-536d-7e53-79f1-5baa366b34d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.832389] env[62974]: DEBUG oslo_vmware.api [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653679, 'name': PowerOnVM_Task, 'duration_secs': 0.844594} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.832658] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 569.832837] env[62974]: INFO nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Took 11.78 seconds to spawn the instance on the hypervisor. [ 569.833026] env[62974]: DEBUG nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 569.833810] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e44b703-adae-4d34-9f46-3f6f6e4fc101 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.911712] env[62974]: DEBUG nova.network.neutron [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.914308] env[62974]: DEBUG nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 569.939970] env[62974]: DEBUG nova.scheduler.client.report [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 569.958801] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 569.959142] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.959327] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 569.959538] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.959690] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 569.960697] 
env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 569.960927] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 569.961123] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 569.961297] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 569.961459] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 569.961629] env[62974]: DEBUG nova.virt.hardware [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 569.962015] env[62974]: DEBUG nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 569.965842] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28af3b58-926c-4b71-a3da-d4edde5c0996 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.976043] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9128a9a2-52fd-4cf4-a364-fdf70f0f281a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.089860] env[62974]: DEBUG nova.network.neutron [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updated VIF entry in instance network info cache for port c1dbf093-9abb-4c1d-a4bc-163058074d4f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 570.089860] env[62974]: DEBUG nova.network.neutron [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updating instance_info_cache with network_info: [{"id": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "address": "fa:16:3e:be:01:c4", "network": {"id": "f0afd336-13eb-49da-8643-c6a4c51451d7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-622006440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf9a2a44db94217bdd7652ef27b5737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1dbf093-9a", "ovs_interfaceid": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.103530] env[62974]: DEBUG nova.network.neutron [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [{"id": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "address": "fa:16:3e:34:87:aa", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b21ba-e0", "ovs_interfaceid": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.225612] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e08f6d-536d-7e53-79f1-5baa366b34d2, 'name': SearchDatastore_Task, 
'duration_secs': 0.009554} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.230017] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f4e4485-b220-43b1-8cdb-19196b8422e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.233396] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 570.233396] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52976127-e24a-9262-d8b1-277f99ba76fc" [ 570.233396] env[62974]: _type = "Task" [ 570.233396] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.242940] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52976127-e24a-9262-d8b1-277f99ba76fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.357077] env[62974]: INFO nova.compute.manager [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Took 23.91 seconds to build instance. [ 570.453374] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.453999] env[62974]: DEBUG nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 570.459135] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.170s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.495938] env[62974]: DEBUG nova.network.neutron [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Successfully updated port: ebd2d4e4-f1df-4022-a6b8-66224fadfb3d {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 570.502696] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.594274] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bb17d8f-9351-4e8e-8f61-2f5d42805c89 req-96f330ab-cc97-487f-a562-5179f3948109 service nova] Releasing lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.605513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.605836] env[62974]: DEBUG nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Instance network_info: |[{"id": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "address": "fa:16:3e:34:87:aa", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b21ba-e0", "ovs_interfaceid": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
570.606335] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:87:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e4b21ba-e0f2-4104-8f46-57871fd6ed16', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.616019] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating folder: Project (9087d01b1ad748e0a66474953dfe7034). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.616019] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e4bb818-1f49-420a-bafa-0e0e7f3a5507 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.625404] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created folder: Project (9087d01b1ad748e0a66474953dfe7034) in parent group-v535199. [ 570.625597] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating folder: Instances. Parent ref: group-v535231. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.625827] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30257727-14c6-498f-a6cc-03482b0e6862 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.634840] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created folder: Instances in parent group-v535231. [ 570.635085] env[62974]: DEBUG oslo.service.loopingcall [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 570.635289] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 570.635485] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-832d9d61-9c5f-45b4-91f7-aba1c726fe23 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.656284] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 570.656284] env[62974]: value = "task-2653682" [ 570.656284] env[62974]: _type = "Task" [ 570.656284] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.664365] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653682, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.743622] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52976127-e24a-9262-d8b1-277f99ba76fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010893} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.743924] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.744243] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 001557f9-ea50-4e86-9eeb-dd4436791453/001557f9-ea50-4e86-9eeb-dd4436791453.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 570.744533] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99bef5b6-86d4-4a3b-a08a-7a89f1bd62f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.750516] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 570.750516] env[62974]: value = "task-2653683" [ 570.750516] env[62974]: _type = "Task" [ 570.750516] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.758630] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653683, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.784315] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "05742180-08db-45db-9ee0-e359aa8af2f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.784624] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "05742180-08db-45db-9ee0-e359aa8af2f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.860846] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7b3be378-dc64-45d2-bd27-32fba0176986 tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.476s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.966290] env[62974]: DEBUG nova.compute.utils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 570.972581] env[62974]: INFO nova.compute.claims [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.977192] env[62974]: DEBUG nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 570.977192] env[62974]: DEBUG nova.network.neutron [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 571.002699] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "refresh_cache-7f0d367d-9d60-414b-990e-56a2b43fd963" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.002904] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquired lock "refresh_cache-7f0d367d-9d60-414b-990e-56a2b43fd963" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.003067] env[62974]: DEBUG nova.network.neutron [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 571.027663] env[62974]: DEBUG nova.policy [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2cd82189b18643f28b8364c5de998e70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2533bd1aab82429f8e7f4eb68cbc94e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 571.173938] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653682, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.265643] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653683, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.348654] env[62974]: DEBUG nova.network.neutron [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Successfully created port: 47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 571.363508] env[62974]: DEBUG nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 571.478167] env[62974]: DEBUG nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 571.484165] env[62974]: INFO nova.compute.resource_tracker [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating resource usage from migration d84a9086-86d7-445d-b99f-b1d247f1cb7c [ 571.545144] env[62974]: DEBUG nova.network.neutron [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.671255] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653682, 'name': CreateVM_Task, 'duration_secs': 0.592316} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.674417] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 571.675743] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.675743] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.676129] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 571.676354] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a07da835-da56-4f8d-8b5e-6c724f6e39bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.684722] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 571.684722] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52640063-155b-5777-2e69-c26ed91ac736" [ 571.684722] env[62974]: _type = "Task" [ 571.684722] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.698781] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52640063-155b-5777-2e69-c26ed91ac736, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.762939] env[62974]: DEBUG nova.network.neutron [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Updating instance_info_cache with network_info: [{"id": "ebd2d4e4-f1df-4022-a6b8-66224fadfb3d", "address": "fa:16:3e:33:82:c9", "network": {"id": "dcbeb97d-31aa-46ca-87b9-ae4c05878b9f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2146140239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c03512b9d335431cbc77d3d599ce3de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebd2d4e4-f1", "ovs_interfaceid": "ebd2d4e4-f1df-4022-a6b8-66224fadfb3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.771118] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636602} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.771118] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 001557f9-ea50-4e86-9eeb-dd4436791453/001557f9-ea50-4e86-9eeb-dd4436791453.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 571.771118] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 571.771118] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75dee110-7914-4370-a350-cfa0fa41881d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.778432] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 571.778432] env[62974]: value = "task-2653684" [ 571.778432] env[62974]: _type = "Task" [ 571.778432] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.790738] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653684, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.811329] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.811573] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.879938] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25dbedf0-5e92-4970-881a-9d5ea95f7518 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.888059] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98434e19-d640-4380-81e6-19307f2b1575 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.920134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.921495] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1420f4-f223-4d0c-ad7e-77eb47ddb223 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.929907] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90667889-929c-46dd-9f4d-bf5eff95ade7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.945943] env[62974]: DEBUG nova.compute.provider_tree [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.123446] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.123946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 
tempest-ServersAdmin275Test-1131043217-project-member] Lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.199070] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52640063-155b-5777-2e69-c26ed91ac736, 'name': SearchDatastore_Task, 'duration_secs': 0.010087} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.199560] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.199985] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.200405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.200660] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.200931] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.201800] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bde3d2f-8c15-4832-9f61-b205372db6f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.212187] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.212696] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 572.214075] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aacbc9c6-c0b2-40f2-8f77-2e6caa83738e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.221826] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 572.221826] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1bd4d-ab38-67b9-e2cf-aacf00f3b86a" [ 572.221826] env[62974]: _type = "Task" [ 572.221826] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.231487] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1bd4d-ab38-67b9-e2cf-aacf00f3b86a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.266802] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Releasing lock "refresh_cache-7f0d367d-9d60-414b-990e-56a2b43fd963" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.269236] env[62974]: DEBUG nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Instance network_info: |[{"id": "ebd2d4e4-f1df-4022-a6b8-66224fadfb3d", "address": "fa:16:3e:33:82:c9", "network": {"id": "dcbeb97d-31aa-46ca-87b9-ae4c05878b9f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2146140239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c03512b9d335431cbc77d3d599ce3de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebd2d4e4-f1", "ovs_interfaceid": "ebd2d4e4-f1df-4022-a6b8-66224fadfb3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 572.269400] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:82:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be8bd197-4b2b-46e7-88ea-2554b0438584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebd2d4e4-f1df-4022-a6b8-66224fadfb3d', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.276609] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Creating folder: Project (c03512b9d335431cbc77d3d599ce3de5). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 572.277053] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff90258b-f384-4e3b-9569-0101237c26d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.292262] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075935} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.292262] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 572.292262] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed55c0e-2979-4d25-8d56-14e4df03c29c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.294765] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Created folder: Project (c03512b9d335431cbc77d3d599ce3de5) in parent group-v535199. [ 572.295084] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Creating folder: Instances. Parent ref: group-v535234. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 572.295697] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4945c70-da78-4e8a-b69c-2d132b2753d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.316471] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 001557f9-ea50-4e86-9eeb-dd4436791453/001557f9-ea50-4e86-9eeb-dd4436791453.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 572.317199] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d74660b-0c75-4ab8-9d13-564509676044 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.332662] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Created folder: Instances in parent group-v535234. [ 572.332949] env[62974]: DEBUG oslo.service.loopingcall [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.333574] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 572.333929] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92b8ef5e-71cb-41a2-82c6-82f01133ab3b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.350404] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 572.350404] env[62974]: value = "task-2653687" [ 572.350404] env[62974]: _type = "Task" [ 572.350404] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.355632] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.355632] env[62974]: value = "task-2653688" [ 572.355632] env[62974]: _type = "Task" [ 572.355632] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.361524] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653687, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.366130] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653688, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.383934] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "2313468e-820f-4fff-bdeb-5d542c94584d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.384192] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "2313468e-820f-4fff-bdeb-5d542c94584d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.384469] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "2313468e-820f-4fff-bdeb-5d542c94584d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.384775] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "2313468e-820f-4fff-bdeb-5d542c94584d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.385012] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "2313468e-820f-4fff-bdeb-5d542c94584d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.388151] env[62974]: INFO nova.compute.manager [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Terminating instance [ 572.449502] env[62974]: DEBUG nova.scheduler.client.report [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 572.487912] env[62974]: DEBUG nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 572.520372] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 572.520636] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 572.520855] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 572.521648] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 572.521845] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 572.521997] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 572.522224] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 572.522583] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 572.522805] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 572.522979] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 572.523167] env[62974]: DEBUG nova.virt.hardware [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 572.524156] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb0849f-a318-4dd5-a4d1-19098d7b238c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.534148] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d99798-e1f0-48a9-9d62-6c84a720ddd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.637231] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.637466] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.637642] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.637761] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c 
tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.637901] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.640738] env[62974]: INFO nova.compute.manager [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Terminating instance [ 572.736422] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1bd4d-ab38-67b9-e2cf-aacf00f3b86a, 'name': SearchDatastore_Task, 'duration_secs': 0.009946} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.737270] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f058eccf-ba38-4373-9f9e-224d5b6f1762 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.745852] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 572.745852] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52702026-efe9-ec20-2f22-09326cb5ed07" [ 572.745852] env[62974]: _type = "Task" [ 572.745852] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.752626] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52702026-efe9-ec20-2f22-09326cb5ed07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.870772] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653687, 'name': ReconfigVM_Task, 'duration_secs': 0.481935} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.873954] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 001557f9-ea50-4e86-9eeb-dd4436791453/001557f9-ea50-4e86-9eeb-dd4436791453.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 572.875106] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653688, 'name': CreateVM_Task, 'duration_secs': 0.446486} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.875300] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73b4dc02-743f-4a05-b379-d79faa6f85c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.877501] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 572.877501] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.877664] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.877975] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.878557] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99d07382-955d-4fc6-8e94-a87dc84a7964 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.882100] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 572.882100] env[62974]: value = "task-2653689" [ 572.882100] env[62974]: _type = "Task" [ 572.882100] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.886099] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 572.886099] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5299c2f0-ce29-3297-a8e6-5d8ca7ce79d6" [ 572.886099] env[62974]: _type = "Task" [ 572.886099] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.893445] env[62974]: DEBUG nova.compute.manager [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 572.893697] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 572.893981] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653689, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.895185] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bec5e6-4fae-4279-a410-ee3a1aabea0c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.901438] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5299c2f0-ce29-3297-a8e6-5d8ca7ce79d6, 'name': SearchDatastore_Task, 'duration_secs': 0.009415} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.902069] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.902307] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.902521] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.906358] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 572.906974] env[62974]: DEBUG nova.network.neutron [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Successfully updated port: 47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.908144] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3493a6d-a0e2-4c97-9204-10e5fcce3f60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.917512] env[62974]: DEBUG oslo_vmware.api [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 572.917512] env[62974]: value = "task-2653690" [ 572.917512] env[62974]: _type = "Task" [ 572.917512] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.927618] env[62974]: DEBUG oslo_vmware.api [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653690, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.954905] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.498s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.955375] env[62974]: INFO nova.compute.manager [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Migrating [ 572.955375] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.955627] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.960690] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.898s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.962753] env[62974]: INFO nova.compute.claims [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 573.146144] env[62974]: DEBUG nova.compute.manager [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 573.147927] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 573.148545] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed57d9b8-d7f8-4ad1-b0f1-74f08ba073a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.161725] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 573.162213] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82846740-94db-4008-b326-e1a9f635df7c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.170880] env[62974]: DEBUG oslo_vmware.api [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 573.170880] env[62974]: value = "task-2653691" [ 573.170880] env[62974]: _type = "Task" [ 573.170880] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.182174] env[62974]: DEBUG oslo_vmware.api [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653691, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.256901] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52702026-efe9-ec20-2f22-09326cb5ed07, 'name': SearchDatastore_Task, 'duration_secs': 0.010192} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.257251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.257705] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] cf73422d-7f4b-4bae-9d69-de74d7211243/cf73422d-7f4b-4bae-9d69-de74d7211243.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 573.258396] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.258396] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.258523] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49837c8f-7d48-4b3a-b096-0e3a1d9f02c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.260720] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36f8612a-0450-48f5-968e-7ce8f5d37e3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.267836] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 573.267836] env[62974]: value = "task-2653692" [ 573.267836] env[62974]: _type = "Task" [ 573.267836] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.271962] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.272152] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 573.273176] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9dd27cf-8d36-4b8d-baac-2601d5060d70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.280769] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.283751] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 573.283751] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f404c3-2493-da41-1f86-6ec7d489a880" [ 573.283751] env[62974]: _type = "Task" [ 573.283751] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.295657] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f404c3-2493-da41-1f86-6ec7d489a880, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.392557] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653689, 'name': Rename_Task, 'duration_secs': 0.257857} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.392910] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 573.395695] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de524a3f-b84c-4f5c-9f63-a89ab04f4ac5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.401338] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 573.401338] env[62974]: value = "task-2653693" [ 573.401338] env[62974]: _type = "Task" [ 573.401338] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.408964] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.410522] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.410739] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquired lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.410815] env[62974]: DEBUG nova.network.neutron [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 573.427023] env[62974]: DEBUG oslo_vmware.api [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653690, 'name': PowerOffVM_Task, 'duration_secs': 0.260182} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.428504] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 573.428742] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 573.429430] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94f6aac6-e7b6-4a6a-8517-40af39490586 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.461477] env[62974]: INFO nova.compute.rpcapi [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 573.462244] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.505604] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 573.506178] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 573.506380] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Deleting the datastore file [datastore1] 2313468e-820f-4fff-bdeb-5d542c94584d {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 573.506772] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbe9d9a3-fbcc-4698-b27c-950aabe15493 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.516487] env[62974]: DEBUG oslo_vmware.api [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for the task: (returnval){ [ 573.516487] env[62974]: value = "task-2653695" [ 573.516487] env[62974]: _type = "Task" [ 573.516487] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.528944] env[62974]: DEBUG oslo_vmware.api [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653695, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.630371] env[62974]: DEBUG nova.compute.manager [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Received event network-vif-plugged-7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 573.630371] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] Acquiring lock "cf73422d-7f4b-4bae-9d69-de74d7211243-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.630652] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.630716] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.631023] env[62974]: DEBUG nova.compute.manager [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] No waiting events found dispatching network-vif-plugged-7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 573.631214] env[62974]: WARNING nova.compute.manager [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Received unexpected event network-vif-plugged-7e4b21ba-e0f2-4104-8f46-57871fd6ed16 for instance with vm_state building and task_state spawning. [ 573.631440] env[62974]: DEBUG nova.compute.manager [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Received event network-changed-7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 573.631680] env[62974]: DEBUG nova.compute.manager [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Refreshing instance network info cache due to event network-changed-7e4b21ba-e0f2-4104-8f46-57871fd6ed16. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 573.632335] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] Acquiring lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.632335] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] Acquired lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.632495] env[62974]: DEBUG nova.network.neutron [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Refreshing network info cache for port 7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 573.653619] env[62974]: DEBUG nova.compute.manager [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Received event network-vif-plugged-ebd2d4e4-f1df-4022-a6b8-66224fadfb3d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 573.653846] env[62974]: DEBUG oslo_concurrency.lockutils [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] Acquiring lock "7f0d367d-9d60-414b-990e-56a2b43fd963-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.654256] env[62974]: DEBUG oslo_concurrency.lockutils [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.654256] env[62974]: DEBUG oslo_concurrency.lockutils [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.654829] env[62974]: DEBUG nova.compute.manager [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] No waiting events found dispatching network-vif-plugged-ebd2d4e4-f1df-4022-a6b8-66224fadfb3d {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 573.654829] env[62974]: WARNING nova.compute.manager [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Received unexpected event network-vif-plugged-ebd2d4e4-f1df-4022-a6b8-66224fadfb3d for instance with vm_state building and task_state spawning. 
[ 573.654829] env[62974]: DEBUG nova.compute.manager [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Received event network-changed-ebd2d4e4-f1df-4022-a6b8-66224fadfb3d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 573.655112] env[62974]: DEBUG nova.compute.manager [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Refreshing instance network info cache due to event network-changed-ebd2d4e4-f1df-4022-a6b8-66224fadfb3d. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 573.655423] env[62974]: DEBUG oslo_concurrency.lockutils [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] Acquiring lock "refresh_cache-7f0d367d-9d60-414b-990e-56a2b43fd963" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.655607] env[62974]: DEBUG oslo_concurrency.lockutils [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] Acquired lock "refresh_cache-7f0d367d-9d60-414b-990e-56a2b43fd963" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.655766] env[62974]: DEBUG nova.network.neutron [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Refreshing network info cache for port ebd2d4e4-f1df-4022-a6b8-66224fadfb3d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 573.682892] env[62974]: DEBUG oslo_vmware.api [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653691, 'name': PowerOffVM_Task, 'duration_secs': 0.223003} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.683217] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 573.683389] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 573.683648] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e6f7406-be97-4544-8df2-343cfdd773c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.753241] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 573.753585] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 573.753802] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Deleting the datastore file [datastore2] 8f4faa77-4f18-41da-b8d0-efba799d6ec6 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 573.754173] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f830180-1a11-4aef-9e79-062a462ebc91 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.763903] env[62974]: DEBUG oslo_vmware.api [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for the task: (returnval){ [ 573.763903] env[62974]: value = "task-2653697" [ 573.763903] env[62974]: _type = "Task" [ 573.763903] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.779378] env[62974]: DEBUG oslo_vmware.api [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653697, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.783050] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653692, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.795502] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f404c3-2493-da41-1f86-6ec7d489a880, 'name': SearchDatastore_Task, 'duration_secs': 0.00885} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.796398] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00d24797-e009-4636-aff8-2e27ca82dbf5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.802099] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 573.802099] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5206d959-2e2f-6fd0-c559-2079266e3eb1" [ 573.802099] env[62974]: _type = "Task" [ 573.802099] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.810981] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5206d959-2e2f-6fd0-c559-2079266e3eb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.911748] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653693, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.954417] env[62974]: DEBUG nova.network.neutron [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.984530] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.984530] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.984729] env[62974]: DEBUG nova.network.neutron [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 574.026798] env[62974]: DEBUG oslo_vmware.api [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Task: {'id': task-2653695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337087} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.027500] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 574.027691] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 574.027691] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 574.027845] env[62974]: INFO nova.compute.manager [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 574.028103] env[62974]: DEBUG oslo.service.loopingcall [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
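
The "Waiting for function ... to return" lines come from oslo.service's looping-call helper, which re-invokes a function until it signals completion. A rough sketch of that retry shape with FixedIntervalLoopingCall; the deallocate() body is a made-up stand-in for _deallocate_network_with_retries.

from oslo_service import loopingcall

state = {"attempts": 0}

def deallocate():
    # Stand-in body; the real function retries Neutron deallocation.
    state["attempts"] += 1
    if state["attempts"] >= 3:
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(deallocate)
done = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
print("deallocated:", done, "after", state["attempts"], "attempts")
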
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.031142] env[62974]: DEBUG nova.compute.manager [-] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 574.031247] env[62974]: DEBUG nova.network.neutron [-] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 574.112510] env[62974]: DEBUG nova.network.neutron [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updating instance_info_cache with network_info: [{"id": "47b61932-1b0f-4b88-9565-96bf61bb3912", "address": "fa:16:3e:57:5f:fc", "network": {"id": "5ff66071-9852-4e55-abe0-836a3842a025", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2009514308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2533bd1aab82429f8e7f4eb68cbc94e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b61932-1b", "ovs_interfaceid": "47b61932-1b0f-4b88-9565-96bf61bb3912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.214213] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "a8446718-f2df-4bad-b5e3-537f19daa823" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.216020] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "a8446718-f2df-4bad-b5e3-537f19daa823" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.216020] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "a8446718-f2df-4bad-b5e3-537f19daa823-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.216020] env[62974]: DEBUG 
oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "a8446718-f2df-4bad-b5e3-537f19daa823-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.216020] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "a8446718-f2df-4bad-b5e3-537f19daa823-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.217259] env[62974]: INFO nova.compute.manager [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Terminating instance [ 574.286937] env[62974]: DEBUG oslo_vmware.api [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Task: {'id': task-2653697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263983} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.290808] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 574.291112] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 574.291351] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 574.291529] env[62974]: INFO nova.compute.manager [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 574.291815] env[62974]: DEBUG oslo.service.loopingcall [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
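
Terminating an instance (the a8446718 entries above) serializes on two locks: the instance UUID around the whole teardown and "<uuid>-events" around clearing pending external events. A toy illustration of that nesting with oslo_concurrency; the function bodies are hypothetical, and the inner per-call function is what the log's "terminate_instance..do_terminate_instance" holder name refers to.

from oslo_concurrency import lockutils

def terminate_instance(uuid):
    @lockutils.synchronized(uuid)
    def do_terminate_instance():
        with lockutils.lock("%s-events" % uuid):
            pass  # clear pending external events for the instance
        print("terminating", uuid)  # power off / destroy would follow
    do_terminate_instance()

terminate_instance("a8446718-f2df-4bad-b5e3-537f19daa823")
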
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.292166] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524566} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.295264] env[62974]: DEBUG nova.compute.manager [-] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 574.295373] env[62974]: DEBUG nova.network.neutron [-] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 574.297141] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] cf73422d-7f4b-4bae-9d69-de74d7211243/cf73422d-7f4b-4bae-9d69-de74d7211243.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 574.297366] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 574.297836] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ff758b8-9176-4d56-bc59-1a24cf02ec95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.309122] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 574.309122] env[62974]: value = "task-2653698" [ 574.309122] env[62974]: _type = "Task" [ 574.309122] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.324438] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5206d959-2e2f-6fd0-c559-2079266e3eb1, 'name': SearchDatastore_Task, 'duration_secs': 0.010438} completed successfully. 
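
The task entries above ("progress is N%", then "completed successfully") are oslo.vmware polling a vCenter task until it finishes. Below is a hedged sketch of that invoke-then-wait pattern; the endpoint, credentials, and exact constructor keywords are placeholder assumptions (check the oslo.vmware docs), and a datacenter reference is normally passed alongside the datastore path.

from oslo_vmware import api as vmware_api

# Placeholder endpoint/credentials; the real values come from nova.conf.
session = vmware_api.VMwareAPISession(
    "vc1.example.org", "user", "secret",
    api_retry_count=10, task_poll_interval=0.5)

# Start a vCenter task (extending a root disk to 1048576 KB, as in the
# log) and block while wait_for_task polls it to completion.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, "ExtendVirtualDisk_Task", disk_mgr,
    name="[datastore2] cf73422d-7f4b-4bae-9d69-de74d7211243/"
         "cf73422d-7f4b-4bae-9d69-de74d7211243.vmdk",
    newCapacityKb=1048576, eagerZero=False)
session.wait_for_task(task)
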
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.325104] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.325450] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 7f0d367d-9d60-414b-990e-56a2b43fd963/7f0d367d-9d60-414b-990e-56a2b43fd963.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 574.328349] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24fe3c01-4d91-4c89-b2cd-795ac5a94858 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.333983] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653698, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.337934] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 574.337934] env[62974]: value = "task-2653699" [ 574.337934] env[62974]: _type = "Task" [ 574.337934] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.346852] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653699, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.375318] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b0c164-3cb1-4f1a-adae-c7c906032423 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.385145] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779a0750-2a58-4cea-bc86-65f5a0576ba7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.421140] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53529770-e622-4595-bb20-dc14488ca082 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.427978] env[62974]: DEBUG oslo_vmware.api [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653693, 'name': PowerOnVM_Task, 'duration_secs': 0.825071} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.432905] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 574.433189] env[62974]: INFO nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Took 9.49 seconds to spawn the instance on the hypervisor. 
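
The instance_info_cache updates above and below serialize each VIF as a nested dict (port id, MAC, bridge, subnets, MTU). A stand-alone snippet that pulls the commonly used fields out of one such entry; the dict literal is trimmed from the 1933bc47 cache update above.

vif = {
    "id": "47b61932-1b0f-4b88-9565-96bf61bb3912",
    "address": "fa:16:3e:57:5f:fc",
    "devname": "tap47b61932-1b",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.10", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
print(vif["id"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
# -> 47b61932-... fa:16:3e:57:5f:fc ['192.168.128.10'] 8950
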
[ 574.433441] env[62974]: DEBUG nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 574.434437] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71fd044-627b-408e-9f45-7f5a4206fe50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.438379] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f62bbe-ace3-4715-aa17-b9f7ff9b8e25 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.457377] env[62974]: DEBUG nova.compute.provider_tree [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.618088] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Releasing lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.618411] env[62974]: DEBUG nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Instance network_info: |[{"id": "47b61932-1b0f-4b88-9565-96bf61bb3912", "address": "fa:16:3e:57:5f:fc", "network": {"id": "5ff66071-9852-4e55-abe0-836a3842a025", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2009514308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2533bd1aab82429f8e7f4eb68cbc94e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b61932-1b", "ovs_interfaceid": "47b61932-1b0f-4b88-9565-96bf61bb3912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 574.618900] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:5f:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47b61932-1b0f-4b88-9565-96bf61bb3912', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.626982] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Creating folder: Project (2533bd1aab82429f8e7f4eb68cbc94e8). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 574.627873] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e40b54be-ccb2-4696-a6e1-4ea073be7cc2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.639570] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Created folder: Project (2533bd1aab82429f8e7f4eb68cbc94e8) in parent group-v535199. [ 574.639767] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Creating folder: Instances. Parent ref: group-v535237. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 574.640151] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb2878b2-7420-4669-9063-ec6d1fe8a2e0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.654551] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Created folder: Instances in parent group-v535237. [ 574.654802] env[62974]: DEBUG oslo.service.loopingcall [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.655014] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 574.655275] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58a4aec5-37c5-4674-8b6b-1b0fddd53c74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.682502] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.682502] env[62974]: value = "task-2653702" [ 574.682502] env[62974]: _type = "Task" [ 574.682502] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.693413] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653702, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.727919] env[62974]: DEBUG nova.compute.manager [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 574.728721] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 574.729303] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478b0837-51a9-4176-ac30-394a75e7d05a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.740628] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 574.741067] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08160d6a-2fc7-4c73-9267-4be88eac196e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.749283] env[62974]: DEBUG oslo_vmware.api [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 574.749283] env[62974]: value = "task-2653703" [ 574.749283] env[62974]: _type = "Task" [ 574.749283] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.761715] env[62974]: DEBUG oslo_vmware.api [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.787152] env[62974]: DEBUG nova.network.neutron [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updated VIF entry in instance network info cache for port 7e4b21ba-e0f2-4104-8f46-57871fd6ed16. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 574.787319] env[62974]: DEBUG nova.network.neutron [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [{"id": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "address": "fa:16:3e:34:87:aa", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b21ba-e0", "ovs_interfaceid": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.826281] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653698, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065528} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.826472] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 574.827499] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a4d5c4-76bf-4956-b312-93997343e86c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.834089] env[62974]: DEBUG nova.network.neutron [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Updated VIF entry in instance network info cache for port ebd2d4e4-f1df-4022-a6b8-66224fadfb3d. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 574.834089] env[62974]: DEBUG nova.network.neutron [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Updating instance_info_cache with network_info: [{"id": "ebd2d4e4-f1df-4022-a6b8-66224fadfb3d", "address": "fa:16:3e:33:82:c9", "network": {"id": "dcbeb97d-31aa-46ca-87b9-ae4c05878b9f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2146140239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c03512b9d335431cbc77d3d599ce3de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be8bd197-4b2b-46e7-88ea-2554b0438584", "external-id": "nsx-vlan-transportzone-338", "segmentation_id": 338, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebd2d4e4-f1", "ovs_interfaceid": "ebd2d4e4-f1df-4022-a6b8-66224fadfb3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.855599] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] cf73422d-7f4b-4bae-9d69-de74d7211243/cf73422d-7f4b-4bae-9d69-de74d7211243.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 574.858810] env[62974]: DEBUG oslo_concurrency.lockutils [req-a12021ae-fcce-4c40-a0cb-a19ffede324c req-e35b2234-abc1-4b67-8d09-b77eb47b9a46 service nova] Releasing lock "refresh_cache-7f0d367d-9d60-414b-990e-56a2b43fd963" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.862862] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89027938-6faf-442c-b409-b3731523fa84 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.883562] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653699, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509023} completed successfully. 
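
The CopyVirtualDisk entries above show the copy-then-personalize layout: the image is cached once per datastore under devstack-image-cache_base/<image id>/, then copied into a per-instance folder named after the instance UUID. A tiny path-building snippet just to make the convention explicit; build_path() is a hypothetical helper.

def build_path(datastore, folder, name):
    return "[%s] %s/%s.vmdk" % (datastore, folder, name)

image_id = "807f8582-499f-47ee-9d5b-755c9f39bc39"
instance_uuid = "7f0d367d-9d60-414b-990e-56a2b43fd963"

cached = build_path("datastore2",
                    "devstack-image-cache_base/" + image_id, image_id)
root = build_path("datastore2", instance_uuid, instance_uuid)
print(cached)  # [datastore2] devstack-image-cache_base/807f.../807f....vmdk
print(root)    # [datastore2] 7f0d.../7f0d....vmdk
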
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.884865] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 7f0d367d-9d60-414b-990e-56a2b43fd963/7f0d367d-9d60-414b-990e-56a2b43fd963.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 574.885095] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 574.885414] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 574.885414] env[62974]: value = "task-2653704" [ 574.885414] env[62974]: _type = "Task" [ 574.885414] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.885628] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34c5a6f9-2446-432f-9432-6b4d6d44f09c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.894790] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 574.894790] env[62974]: value = "task-2653705" [ 574.894790] env[62974]: _type = "Task" [ 574.894790] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.898477] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653704, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.907109] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653705, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.965389] env[62974]: DEBUG nova.scheduler.client.report [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 574.971942] env[62974]: DEBUG nova.network.neutron [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [{"id": "947659a6-f0ce-4065-a591-6a15666e4ac5", "address": "fa:16:3e:f1:cd:d9", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947659a6-f0", "ovs_interfaceid": "947659a6-f0ce-4065-a591-6a15666e4ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.973750] env[62974]: INFO nova.compute.manager [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Took 20.40 seconds to build instance. [ 575.003826] env[62974]: DEBUG nova.network.neutron [-] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.192532] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653702, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.260254] env[62974]: DEBUG oslo_vmware.api [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653703, 'name': PowerOffVM_Task, 'duration_secs': 0.266665} completed successfully. 
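
For the inventory reported above for provider bd3bd9ae-..., Placement treats usable capacity as roughly (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check of those numbers:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 121},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity, "max per allocation:", inv["max_unit"])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
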
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.262772] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 575.262772] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 575.262772] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43187c0b-b102-46b7-8dee-021efa2aa6dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.291272] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd6a5bfb-0bfa-43c4-9c4e-849fc2457d69 req-5bb8622a-7459-4c27-8d32-b63b59b2da66 service nova] Releasing lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.336649] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 575.336848] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 575.338946] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleting the datastore file [datastore1] a8446718-f2df-4bad-b5e3-537f19daa823 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 575.338946] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b9446a8-ccd7-4158-b42d-f791faa99518 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.347335] env[62974]: DEBUG oslo_vmware.api [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 575.347335] env[62974]: value = "task-2653707" [ 575.347335] env[62974]: _type = "Task" [ 575.347335] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.357351] env[62974]: DEBUG oslo_vmware.api [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653707, 'name': DeleteDatastoreFile_Task} progress is 0%. 
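
Each hypervisor-side teardown in this section (8f4faa77, 2313468e, a8446718) follows the same three vCenter steps in the same order. A trivial sketch of just that ordering; all three callables are hypothetical.

def destroy_on_hypervisor(power_off, unregister, delete_folder):
    power_off()       # PowerOffVM_Task
    unregister()      # VirtualMachine.UnregisterVM
    delete_folder()   # FileManager.DeleteDatastoreFile_Task

destroy_on_hypervisor(
    power_off=lambda: print("powered off"),
    unregister=lambda: print("unregistered"),
    delete_folder=lambda: print("deleted [datastore1] a8446718-..."),
)
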
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.405474] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653704, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.413201] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653705, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113315} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.413471] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 575.414345] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e8ddd8-9d53-480b-92fa-9328a5675565 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.438279] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 7f0d367d-9d60-414b-990e-56a2b43fd963/7f0d367d-9d60-414b-990e-56a2b43fd963.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 575.438977] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51096f1a-f637-41a9-a30d-bce952221595 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.460215] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 575.460215] env[62974]: value = "task-2653708" [ 575.460215] env[62974]: _type = "Task" [ 575.460215] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.470289] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653708, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.477563] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.478251] env[62974]: DEBUG nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 575.482653] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.483904] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.882s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.483904] env[62974]: DEBUG nova.objects.instance [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Lazy-loading 'resources' on Instance uuid 572c2c5f-6a24-4532-9c80-d76017e4aaa1 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 575.486076] env[62974]: DEBUG nova.network.neutron [-] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.487374] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d9d5f35-5fb3-4d85-9268-2c2f22b1181b tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "001557f9-ea50-4e86-9eeb-dd4436791453" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.649s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.508203] env[62974]: INFO nova.compute.manager [-] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Took 1.48 seconds to deallocate network for instance. [ 575.693696] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653702, 'name': CreateVM_Task, 'duration_secs': 0.610309} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.693899] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 575.694764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.694991] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.695808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 575.695808] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c7e9edf-2eeb-4dc7-962b-f4c7a3b058f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.700975] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 575.700975] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c5478-f1f6-fb12-9386-ecc4998e822a" [ 575.700975] env[62974]: _type = "Task" [ 575.700975] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.708120] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c5478-f1f6-fb12-9386-ecc4998e822a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.805375] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "2a498460-fced-410b-8b33-3595a2ac6753" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.805720] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2a498460-fced-410b-8b33-3595a2ac6753" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.805960] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "2a498460-fced-410b-8b33-3595a2ac6753-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.806218] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2a498460-fced-410b-8b33-3595a2ac6753-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.806385] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2a498460-fced-410b-8b33-3595a2ac6753-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.809188] env[62974]: INFO nova.compute.manager [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Terminating instance [ 575.859904] env[62974]: DEBUG oslo_vmware.api [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653707, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157733} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.860364] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 575.860700] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 575.860998] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 575.861507] env[62974]: INFO nova.compute.manager [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Took 1.13 seconds to destroy the instance on the hypervisor. [ 575.861790] env[62974]: DEBUG oslo.service.loopingcall [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.861994] env[62974]: DEBUG nova.compute.manager [-] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 575.862704] env[62974]: DEBUG nova.network.neutron [-] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 575.900979] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.901622] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.909194] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653704, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.971908] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653708, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.998298] env[62974]: DEBUG nova.compute.utils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.998298] env[62974]: INFO nova.compute.manager [-] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Took 1.70 seconds to deallocate network for instance. [ 575.998298] env[62974]: DEBUG nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 576.001591] env[62974]: DEBUG nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 576.002113] env[62974]: DEBUG nova.network.neutron [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 576.015295] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.095063] env[62974]: DEBUG nova.policy [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35fbf7e6197b4a5eb3e59e2d7dcb42a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a095f717f7d4c1e81311a0810eed958', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 576.220699] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c5478-f1f6-fb12-9386-ecc4998e822a, 'name': SearchDatastore_Task, 'duration_secs': 0.023306} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.221729] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.221729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 576.221729] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.221974] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.221974] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 576.226711] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40174f98-1e05-4a29-a3a2-d377fa212ddb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.237279] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.237479] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 576.238395] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b14356f7-3f26-4412-9b7c-ebacb4fea7ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.246855] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 576.246855] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52590687-a30e-7505-05c4-67ff0c2ae617" [ 576.246855] env[62974]: _type = "Task" [ 576.246855] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.257541] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52590687-a30e-7505-05c4-67ff0c2ae617, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.316152] env[62974]: DEBUG nova.compute.manager [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 576.316398] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 576.317605] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147f1bbd-3308-43e6-8c25-3a5cab952c35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.326244] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 576.329476] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe8c387c-9698-4577-8f77-6146f3020d28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.335804] env[62974]: DEBUG oslo_vmware.api [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 576.335804] env[62974]: value = "task-2653709" [ 576.335804] env[62974]: _type = "Task" [ 576.335804] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.352037] env[62974]: DEBUG oslo_vmware.api [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653709, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.405839] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653704, 'name': ReconfigVM_Task, 'duration_secs': 1.294509} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.408042] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Reconfigured VM instance instance-0000000c to attach disk [datastore2] cf73422d-7f4b-4bae-9d69-de74d7211243/cf73422d-7f4b-4bae-9d69-de74d7211243.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 576.408042] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ad04b00-b1dd-4c22-97d9-849335062949 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.414033] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 576.414033] env[62974]: value = "task-2653710" [ 576.414033] env[62974]: _type = "Task" [ 576.414033] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.423451] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653710, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.461417] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395f111f-6dc4-4b9d-94c4-98986626176d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.480236] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653708, 'name': ReconfigVM_Task, 'duration_secs': 0.812909} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.481038] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7e6d34-e7b9-4c58-bb81-8611f0e42fa0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.484768] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 7f0d367d-9d60-414b-990e-56a2b43fd963/7f0d367d-9d60-414b-990e-56a2b43fd963.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 576.485392] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75047dd9-ec86-4881-b93f-f9efc89e763b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.527939] env[62974]: DEBUG nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 576.534955] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.540356] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00206cac-b59f-42de-ae4c-5373f87aa74f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.543797] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 576.543797] env[62974]: value = "task-2653711" [ 576.543797] env[62974]: _type = "Task" [ 576.543797] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.553080] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523ebebf-d708-46d4-8561-c3bd3240e5d4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.561847] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653711, 'name': Rename_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.574483] env[62974]: DEBUG nova.compute.provider_tree [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.577814] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.579648] env[62974]: DEBUG nova.compute.manager [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Received event network-changed-a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 576.580168] env[62974]: DEBUG nova.compute.manager [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Refreshing instance network info cache due to event network-changed-a4073f26-c2d4-4275-aced-337895f21b0c. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 576.580426] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] Acquiring lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.580566] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] Acquired lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.580728] env[62974]: DEBUG nova.network.neutron [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Refreshing network info cache for port a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 576.591871] env[62974]: DEBUG nova.network.neutron [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Successfully created port: 8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 576.607612] env[62974]: DEBUG nova.compute.manager [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Received event network-vif-plugged-47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 576.607912] env[62974]: DEBUG oslo_concurrency.lockutils [req-649361ad-dd33-4116-9dd3-e250718c64fd 
req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] Acquiring lock "1933bc47-1717-48c1-b4a2-492a17573de7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.608197] env[62974]: DEBUG oslo_concurrency.lockutils [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] Lock "1933bc47-1717-48c1-b4a2-492a17573de7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.608301] env[62974]: DEBUG oslo_concurrency.lockutils [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] Lock "1933bc47-1717-48c1-b4a2-492a17573de7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.608574] env[62974]: DEBUG nova.compute.manager [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] No waiting events found dispatching network-vif-plugged-47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 576.608731] env[62974]: WARNING nova.compute.manager [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Received unexpected event network-vif-plugged-47b61932-1b0f-4b88-9565-96bf61bb3912 for instance with vm_state building and task_state spawning. [ 576.608832] env[62974]: DEBUG nova.compute.manager [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Received event network-changed-47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 576.609275] env[62974]: DEBUG nova.compute.manager [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Refreshing instance network info cache due to event network-changed-47b61932-1b0f-4b88-9565-96bf61bb3912. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 576.609531] env[62974]: DEBUG oslo_concurrency.lockutils [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] Acquiring lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.609531] env[62974]: DEBUG oslo_concurrency.lockutils [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] Acquired lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.609822] env[62974]: DEBUG nova.network.neutron [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Refreshing network info cache for port 47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 576.761024] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52590687-a30e-7505-05c4-67ff0c2ae617, 'name': SearchDatastore_Task, 'duration_secs': 0.008711} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.762655] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf8a9e95-7446-4431-a89d-dfead5b54b51 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.769654] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 576.769654] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52980bcb-5473-0f7f-80cb-5cda50d6614e" [ 576.769654] env[62974]: _type = "Task" [ 576.769654] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.779177] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52980bcb-5473-0f7f-80cb-5cda50d6614e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.801746] env[62974]: DEBUG nova.network.neutron [-] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.845166] env[62974]: DEBUG oslo_vmware.api [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653709, 'name': PowerOffVM_Task, 'duration_secs': 0.231395} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.845442] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 576.845611] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 576.846106] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa27af05-2b81-4e5f-8cb6-9caefd373a75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.911406] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 576.911632] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 576.911809] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleting the datastore file [datastore1] 2a498460-fced-410b-8b33-3595a2ac6753 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 576.912081] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-178aa319-8b73-48f0-8c9e-8663c038070b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.921742] env[62974]: DEBUG oslo_vmware.api [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 576.921742] env[62974]: value = "task-2653713" [ 576.921742] env[62974]: _type = "Task" [ 576.921742] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.928552] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653710, 'name': Rename_Task, 'duration_secs': 0.152213} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.929369] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 576.929561] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d5431da-fa29-4ff4-8e9e-874ca07af9b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.933921] env[62974]: DEBUG oslo_vmware.api [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653713, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.937879] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 576.937879] env[62974]: value = "task-2653714" [ 576.937879] env[62974]: _type = "Task" [ 576.937879] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.950755] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653714, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.047533] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8830842-2a8a-4c3d-a6a6-50e236ea9925 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.064623] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653711, 'name': Rename_Task, 'duration_secs': 0.260938} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.080495] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 577.081338] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance 'd8b7a39f-ec73-4a87-9b1e-9428ca72f895' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 577.084971] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fb5adf7-fdd4-41c4-8c6e-a15ac876ac0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.091440] env[62974]: DEBUG nova.scheduler.client.report [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 577.100888] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 577.100888] env[62974]: value = "task-2653715" [ 577.100888] env[62974]: _type = "Task" [ 577.100888] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.111475] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.286065] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52980bcb-5473-0f7f-80cb-5cda50d6614e, 'name': SearchDatastore_Task, 'duration_secs': 0.012567} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.286065] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.289859] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 1933bc47-1717-48c1-b4a2-492a17573de7/1933bc47-1717-48c1-b4a2-492a17573de7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 577.289859] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a27642b-2de0-4a43-b0d2-7ed00df379f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.295216] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 577.295216] env[62974]: value = "task-2653716" [ 577.295216] env[62974]: _type = "Task" [ 577.295216] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.305943] env[62974]: INFO nova.compute.manager [-] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Took 1.44 seconds to deallocate network for instance. [ 577.306348] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653716, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.433715] env[62974]: DEBUG oslo_vmware.api [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2653713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136018} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.434016] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 577.434485] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 577.434693] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 577.435713] env[62974]: INFO nova.compute.manager [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Took 1.12 seconds to destroy the instance on the hypervisor. [ 577.435713] env[62974]: DEBUG oslo.service.loopingcall [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.435713] env[62974]: DEBUG nova.compute.manager [-] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 577.435713] env[62974]: DEBUG nova.network.neutron [-] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 577.447630] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653714, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.557235] env[62974]: DEBUG nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 577.589807] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:50:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='431461561',id=24,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-27846064',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 577.590055] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.590233] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 577.590428] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.590789] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 577.590789] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 577.590943] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 577.592614] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 577.592744] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 577.592973] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 577.593182] env[62974]: DEBUG nova.virt.hardware [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 577.594596] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66c6e6d-d20d-41f4-a1b2-484eb9e1e03b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.598540] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.604566] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 577.606892] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.470s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.606892] env[62974]: DEBUG nova.objects.instance [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lazy-loading 'resources' on Instance uuid 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 577.607875] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7345d883-ccdf-4054-809e-579eda0092e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.621323] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a9ceed-5100-464e-a4db-60026b27fb8f {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.636227] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653715, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.636570] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 577.636570] env[62974]: value = "task-2653717" [ 577.636570] env[62974]: _type = "Task" [ 577.636570] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.637614] env[62974]: INFO nova.scheduler.client.report [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Deleted allocations for instance 572c2c5f-6a24-4532-9c80-d76017e4aaa1 [ 577.661829] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653717, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.813772] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653716, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.815344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.951697] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653714, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.986824] env[62974]: DEBUG nova.network.neutron [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updated VIF entry in instance network info cache for port 47b61932-1b0f-4b88-9565-96bf61bb3912. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 577.986967] env[62974]: DEBUG nova.network.neutron [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updating instance_info_cache with network_info: [{"id": "47b61932-1b0f-4b88-9565-96bf61bb3912", "address": "fa:16:3e:57:5f:fc", "network": {"id": "5ff66071-9852-4e55-abe0-836a3842a025", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2009514308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2533bd1aab82429f8e7f4eb68cbc94e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b61932-1b", "ovs_interfaceid": "47b61932-1b0f-4b88-9565-96bf61bb3912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.009531] env[62974]: DEBUG nova.network.neutron [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Updated VIF entry in instance network info cache for port a4073f26-c2d4-4275-aced-337895f21b0c. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 578.009892] env[62974]: DEBUG nova.network.neutron [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Updating instance_info_cache with network_info: [{"id": "a4073f26-c2d4-4275-aced-337895f21b0c", "address": "fa:16:3e:a8:2c:86", "network": {"id": "b099dd4d-3410-4464-ba41-9f3bf3fd709b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-373447992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d546e5faf230414aa1cb1cb08bcc6bcc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4073f26-c2", "ovs_interfaceid": "a4073f26-c2d4-4275-aced-337895f21b0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.122935] env[62974]: DEBUG oslo_vmware.api [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653715, 'name': PowerOnVM_Task, 'duration_secs': 0.981678} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.123300] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 578.123539] env[62974]: INFO nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Took 8.21 seconds to spawn the instance on the hypervisor. 
[ 578.123758] env[62974]: DEBUG nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 578.125060] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6fa29d1-58b5-4563-a6b0-172a0606e36d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.148788] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653717, 'name': PowerOffVM_Task, 'duration_secs': 0.441636} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.151958] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 578.152366] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance 'd8b7a39f-ec73-4a87-9b1e-9428ca72f895' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 578.158796] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d055de2a-6c99-4843-937c-6b0ee07ebe02 tempest-DeleteServersAdminTestJSON-1238628908 tempest-DeleteServersAdminTestJSON-1238628908-project-admin] Lock "572c2c5f-6a24-4532-9c80-d76017e4aaa1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.087s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.313324] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653716, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592751} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.313694] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 1933bc47-1717-48c1-b4a2-492a17573de7/1933bc47-1717-48c1-b4a2-492a17573de7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 578.314102] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 578.314370] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-556fd2a1-6912-4690-be7f-1372f64fec5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.322285] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 578.322285] env[62974]: value = "task-2653718" [ 578.322285] env[62974]: _type = "Task" [ 578.322285] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.341588] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653718, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.450532] env[62974]: DEBUG oslo_vmware.api [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2653714, 'name': PowerOnVM_Task, 'duration_secs': 1.233009} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.451127] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 578.451408] env[62974]: INFO nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Took 11.02 seconds to spawn the instance on the hypervisor. 
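(Aside: every CopyVirtualDisk_Task / PowerOnVM_Task / ExtendVirtualDisk_Task above follows the same shape -- submit the vCenter task, then poll it ("progress is 0%", "progress is 14%", ...) until it reports success or error. The real code path is the wait_for_task/_poll_task machinery in oslo_vmware/api.py visible in the log tags; the sketch below only illustrates the poll loop, and get_task_info with its .state/.progress/.error_msg fields is a placeholder, not the actual SDK call.)

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter-style task until it reaches a terminal state.

        get_task_info is a placeholder callable returning an object with
        .state ('running', 'success', 'error'), .progress and .error_msg.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(info.error_msg)
            # Corresponds to the "progress is N%" lines emitted while polling.
            print("progress is %s%%" % info.progress)
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %ss" % timeout)
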
[ 578.451550] env[62974]: DEBUG nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 578.452444] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389464f2-f8fb-46af-9174-904ec8052f26 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.491585] env[62974]: DEBUG oslo_concurrency.lockutils [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] Releasing lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.492097] env[62974]: DEBUG nova.compute.manager [req-649361ad-dd33-4116-9dd3-e250718c64fd req-61e56fb1-dcc8-4524-bea9-12d0179f1560 service nova] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Received event network-vif-deleted-7a90f0f7-f944-4a9c-84ff-875c8ea990b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 578.516079] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] Releasing lock "refresh_cache-7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.516079] env[62974]: DEBUG nova.compute.manager [req-3a2776e0-9415-40de-80fe-bd5352b90ecc req-4194baa6-88dd-45ec-ac99-43abfaafd8e4 service nova] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Received event network-vif-deleted-227bb3d2-feed-4b53-8666-bc56eb0c3d3e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 578.600606] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b364a2f-f1f4-472d-8263-ee0d05cac329 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.609163] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80298f55-a0bb-46f5-8c15-800b41986e5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.646403] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685a637d-a82d-4a94-a737-bc8a7f64d16c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.656769] env[62974]: INFO nova.compute.manager [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Took 22.77 seconds to build instance. 
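(Aside: the Acquiring lock / acquired / "released" ... held N.NNNs triples that recur through this log come from oslo.concurrency's lockutils wrapper, which times how long each caller waited for and held a named lock. A minimal sketch of the same pattern is below; the lock names and the work inside are illustrative only.)

    from oslo_concurrency import lockutils

    # Serialize per-instance work the way the compute manager serializes on its
    # instance-UUID and "compute_resources" locks in the log; this name is made up.
    @lockutils.synchronized("example-instance-uuid")
    def terminate_instance():
        # ... detach ports, destroy the VM, free the resource claim ...
        pass

    # The context-manager form covers ad-hoc critical sections.
    with lockutils.lock("compute_resources"):
        pass  # e.g. update resource-tracker state
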
[ 578.661164] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 578.661164] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.661292] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 578.661402] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.661543] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 578.661684] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 578.661880] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 578.662044] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 578.662213] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 578.662370] env[62974]: DEBUG nova.virt.hardware [None 
req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 578.662534] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 578.671261] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b374a6f4-ab08-42c1-a356-53fc300fe28a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.684277] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cf0fd0-e573-4616-afaa-58c9f4ac208a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.689591] env[62974]: DEBUG nova.network.neutron [-] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.693977] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 578.693977] env[62974]: value = "task-2653719" [ 578.693977] env[62974]: _type = "Task" [ 578.693977] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.702676] env[62974]: DEBUG nova.compute.provider_tree [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.712999] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653719, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.834298] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653718, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070275} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.834581] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 578.835642] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a541aa98-ed96-4736-99c5-54f3abdce93f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.867091] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 1933bc47-1717-48c1-b4a2-492a17573de7/1933bc47-1717-48c1-b4a2-492a17573de7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 578.867313] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77143b62-8917-46cc-9030-baa3c887c082 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.885816] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 578.885816] env[62974]: value = "task-2653720" [ 578.885816] env[62974]: _type = "Task" [ 578.885816] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.894136] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653720, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.980097] env[62974]: INFO nova.compute.manager [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Took 23.22 seconds to build instance. 
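(Aside: the hardware records earlier in this stretch -- "Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" -- are Nova enumerating the (sockets, cores, threads) splits of the flavor's vCPU count that fit within the limits, which are effectively unlimited here at 65536 each. The sketch below illustrates that enumeration for the same inputs; it is an illustration of the idea, not Nova's hardware.py code.)

    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every sockets*cores*threads factorisation of vcpus within the limits."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield Topology(sockets, cores, threads)

    print(list(possible_topologies(1)))       # [Topology(sockets=1, cores=1, threads=1)] -- the single hit logged above
    print(len(list(possible_topologies(4))))  # 6 factorisations for a 4-vCPU flavor
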
[ 579.038178] env[62974]: DEBUG nova.network.neutron [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Successfully updated port: 8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 579.169532] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a0b87bc9-32c6-409d-aaed-60eafd947528 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.874s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.192635] env[62974]: INFO nova.compute.manager [-] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Took 1.76 seconds to deallocate network for instance. [ 579.211970] env[62974]: DEBUG nova.scheduler.client.report [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 579.216750] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653719, 'name': ReconfigVM_Task, 'duration_secs': 0.269626} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.216750] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance 'd8b7a39f-ec73-4a87-9b1e-9428ca72f895' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 579.398980] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653720, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.483767] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c1fbe36e-6448-419c-917d-400dcfd6c34e tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.153s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.542102] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.542102] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.542198] env[62974]: DEBUG nova.network.neutron [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.673969] env[62974]: DEBUG nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 579.695634] env[62974]: DEBUG nova.compute.manager [req-23b1357b-73cf-4e55-ac29-44008a717d7e req-108bcd08-9def-4f6b-920d-f369c6c7d2bb service nova] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Received event network-vif-deleted-8f4af602-edfd-46cd-8684-cff88d420350 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 579.695714] env[62974]: DEBUG nova.compute.manager [req-23b1357b-73cf-4e55-ac29-44008a717d7e req-108bcd08-9def-4f6b-920d-f369c6c7d2bb service nova] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Received event network-vif-deleted-44fa1a20-5950-4b22-8e9b-213c4323f03f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 579.700340] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.720856] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.727851] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 579.728190] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.728324] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 579.728764] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.728764] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc 
tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 579.728764] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 579.728972] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 579.729099] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 579.729965] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 579.730108] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 579.730657] env[62974]: DEBUG nova.virt.hardware [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 579.738321] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Reconfiguring VM instance instance-00000001 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 579.738321] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.290s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.741568] env[62974]: INFO nova.compute.claims [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.745146] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-892ba57e-a85a-4fa8-b1a6-431baa7596f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.761822] env[62974]: INFO nova.scheduler.client.report [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Deleted allocations for instance 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d [ 579.772592] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 579.772592] env[62974]: value = "task-2653721" [ 579.772592] env[62974]: _type = "Task" [ 579.772592] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.782661] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.900701] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653720, 'name': ReconfigVM_Task, 'duration_secs': 0.63535} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.901393] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 1933bc47-1717-48c1-b4a2-492a17573de7/1933bc47-1717-48c1-b4a2-492a17573de7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 579.901663] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ef2509d-202c-493e-8a5b-e74824510dce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.910656] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 579.910656] env[62974]: value = "task-2653722" [ 579.910656] env[62974]: _type = "Task" [ 579.910656] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.919507] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653722, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.987024] env[62974]: DEBUG nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 580.092108] env[62974]: DEBUG nova.network.neutron [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.172137] env[62974]: DEBUG nova.compute.manager [req-c4420efd-1673-4039-b43f-75c09a4afbd3 req-555deb8c-2d5f-4f7c-8b88-a7c5ca357ae4 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Received event network-vif-plugged-8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 580.173193] env[62974]: DEBUG oslo_concurrency.lockutils [req-c4420efd-1673-4039-b43f-75c09a4afbd3 req-555deb8c-2d5f-4f7c-8b88-a7c5ca357ae4 service nova] Acquiring lock "f9adcd7e-58a0-433c-8602-cca814b84aaa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.173193] env[62974]: DEBUG oslo_concurrency.lockutils [req-c4420efd-1673-4039-b43f-75c09a4afbd3 req-555deb8c-2d5f-4f7c-8b88-a7c5ca357ae4 service nova] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.173193] env[62974]: DEBUG oslo_concurrency.lockutils [req-c4420efd-1673-4039-b43f-75c09a4afbd3 req-555deb8c-2d5f-4f7c-8b88-a7c5ca357ae4 service nova] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.173193] env[62974]: DEBUG nova.compute.manager [req-c4420efd-1673-4039-b43f-75c09a4afbd3 req-555deb8c-2d5f-4f7c-8b88-a7c5ca357ae4 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] No waiting events found dispatching network-vif-plugged-8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 580.173193] env[62974]: WARNING nova.compute.manager [req-c4420efd-1673-4039-b43f-75c09a4afbd3 req-555deb8c-2d5f-4f7c-8b88-a7c5ca357ae4 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Received unexpected event network-vif-plugged-8e95b6b2-a646-4f70-9191-7305ffd14c84 for instance with vm_state building and task_state spawning. 
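(Aside: the network-vif-plugged sequence above -- event received, "No waiting events found dispatching ...", then the WARNING about an unexpected event -- is the race between Neutron notifying Nova that the port is up and the spawning thread registering interest in that event via InstanceEvents.pop_instance_event; when the event arrives first and nobody is waiting, it is logged and dropped. Below is a toy analogue of the waiting side built on threading.Event; the class and names are ours, not Nova's.)

    import threading

    class InstanceEvents:
        """Toy registry: spawning code waits, the Neutron callback signals."""

        def __init__(self):
            self._events = {}
            self._lock = threading.Lock()

        def prepare(self, name):
            with self._lock:
                return self._events.setdefault(name, threading.Event())

        def pop(self, name):
            with self._lock:
                ev = self._events.pop(name, None)
            if ev is None:
                # Analogue of the "Received unexpected event ..." WARNING above.
                print("unexpected event %s, nobody is waiting" % name)
            else:
                ev.set()

    events = InstanceEvents()
    waiter = events.prepare("network-vif-plugged-8e95b6b2")  # register before plugging the VIF
    events.pop("network-vif-plugged-8e95b6b2")               # Neutron notification arrives
    assert waiter.wait(timeout=1)                            # spawn continues once the port is up
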
[ 580.205170] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.271502] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d5e96670-8969-415d-8178-5cf5c84c60b8 tempest-ServerDiagnosticsNegativeTest-622017640 tempest-ServerDiagnosticsNegativeTest-622017640-project-member] Lock "124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.297s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.286127] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653721, 'name': ReconfigVM_Task, 'duration_secs': 0.207332} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.286727] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Reconfigured VM instance instance-00000001 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 580.287792] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53c6756-0d5f-46e0-8d31-f0e7fd734fc2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.317868] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] d8b7a39f-ec73-4a87-9b1e-9428ca72f895/d8b7a39f-ec73-4a87-9b1e-9428ca72f895.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 580.319428] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c51ccba-cc8d-419d-96d2-68ca34dcbd94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.339490] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 580.339490] env[62974]: value = "task-2653723" [ 580.339490] env[62974]: _type = "Task" [ 580.339490] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.348120] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653723, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.422177] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653722, 'name': Rename_Task, 'duration_secs': 0.170787} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.422605] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 580.423081] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1be1979d-6f80-4292-9920-7800c7e1999c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.429481] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 580.429481] env[62974]: value = "task-2653724" [ 580.429481] env[62974]: _type = "Task" [ 580.429481] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.438767] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653724, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.510434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.622634] env[62974]: DEBUG nova.network.neutron [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Updating instance_info_cache with network_info: [{"id": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "address": "fa:16:3e:cd:f7:77", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e95b6b2-a6", "ovs_interfaceid": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.830693] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "22a0a34a-c46b-4246-9a80-3540550bd793" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.830949] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "22a0a34a-c46b-4246-9a80-3540550bd793" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.854763] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653723, 'name': ReconfigVM_Task, 'duration_secs': 0.334966} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.855050] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Reconfigured VM instance instance-00000001 to attach disk [datastore2] d8b7a39f-ec73-4a87-9b1e-9428ca72f895/d8b7a39f-ec73-4a87-9b1e-9428ca72f895.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 580.855954] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance 'd8b7a39f-ec73-4a87-9b1e-9428ca72f895' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 580.947775] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653724, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.128617] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Releasing lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.129476] env[62974]: DEBUG nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Instance network_info: |[{"id": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "address": "fa:16:3e:cd:f7:77", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e95b6b2-a6", "ovs_interfaceid": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 581.130111] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 
tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:f7:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5116f690-f825-4fee-8a47-42b073e716c5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e95b6b2-a646-4f70-9191-7305ffd14c84', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 581.141210] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Creating folder: Project (8a095f717f7d4c1e81311a0810eed958). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.141613] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a97404b0-21fc-4149-8bd4-551637501b82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.155461] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Created folder: Project (8a095f717f7d4c1e81311a0810eed958) in parent group-v535199. [ 581.155691] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Creating folder: Instances. Parent ref: group-v535240. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.155969] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9266f244-8a26-4fac-a373-96ef64afba23 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.173444] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Created folder: Instances in parent group-v535240. [ 581.173717] env[62974]: DEBUG oslo.service.loopingcall [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.173873] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 581.174125] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8079eee4-3d4e-424b-868e-7a1738a1014e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.197418] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 581.197418] env[62974]: value = "task-2653727" [ 581.197418] env[62974]: _type = "Task" [ 581.197418] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.206095] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653727, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.235911] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb1bb48-f2d7-4748-8d0f-faf1110da5c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.244319] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1f84e9-c024-46c8-ab4c-607cb7a858fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.281600] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02d10fc-2d69-4548-9e50-fda2c2c1fe37 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.287713] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031ea876-7fba-4b04-8e6b-6aa53a812e0d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.302819] env[62974]: DEBUG nova.compute.provider_tree [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.365517] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cb6e66-7692-4cdb-8538-87f1908c939e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.391119] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3fe14e-0db4-4434-ac62-9896bb8e9b61 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.418725] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance 'd8b7a39f-ec73-4a87-9b1e-9428ca72f895' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 581.444761] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653724, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.706724] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653727, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.807960] env[62974]: DEBUG nova.scheduler.client.report [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 581.948603] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653724, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.127241] env[62974]: DEBUG nova.network.neutron [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Port 947659a6-f0ce-4065-a591-6a15666e4ac5 binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 582.209967] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653727, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.314516] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.314516] env[62974]: DEBUG nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 582.318113] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.446s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.319570] env[62974]: INFO nova.compute.claims [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.434271] env[62974]: DEBUG nova.compute.manager [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 582.435231] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eadc285-0bb1-45f9-b10a-e3905bb0c579 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.456331] env[62974]: DEBUG oslo_vmware.api [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2653724, 'name': PowerOnVM_Task, 'duration_secs': 1.614127} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.458266] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 582.458643] env[62974]: INFO nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Took 9.97 seconds to spawn the instance on the hypervisor. 
[ 582.458950] env[62974]: DEBUG nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 582.462119] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c605b55-4125-4bf6-8505-008fd3fe9b33 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.502695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "30fcd64c-4570-454b-a7e5-3246c92d90fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.503024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.711095] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653727, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.825449] env[62974]: DEBUG nova.compute.utils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.826867] env[62974]: DEBUG nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 582.828035] env[62974]: DEBUG nova.network.neutron [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.967688] env[62974]: INFO nova.compute.manager [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] instance snapshotting [ 582.971950] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74267064-7905-4d1e-b2f9-18c298534e32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.006183] env[62974]: INFO nova.compute.manager [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Took 25.04 seconds to build instance. [ 583.008089] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6549ddc9-e88b-4c27-9397-381985189b0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.031118] env[62974]: DEBUG nova.policy [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e08ff9a28e0d4035b2d718fff54f2619', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57827584df3c485ca936672aebb4c992', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 583.165022] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.165022] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.165022] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.210991] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653727, 'name': CreateVM_Task, 'duration_secs': 1.976338} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.212110] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 583.217460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.217460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.217460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 583.217460] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72717d36-3a21-4945-b483-5d8d75f0f599 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.225120] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 583.225120] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a90de6-8b30-3294-9327-970536f9eea2" [ 583.225120] env[62974]: _type = "Task" [ 583.225120] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.234946] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a90de6-8b30-3294-9327-970536f9eea2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.334483] env[62974]: DEBUG nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 583.343383] env[62974]: DEBUG nova.compute.manager [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Received event network-changed-7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 583.343431] env[62974]: DEBUG nova.compute.manager [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Refreshing instance network info cache due to event network-changed-7e4b21ba-e0f2-4104-8f46-57871fd6ed16. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 583.343637] env[62974]: DEBUG oslo_concurrency.lockutils [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] Acquiring lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.343950] env[62974]: DEBUG oslo_concurrency.lockutils [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] Acquired lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.343950] env[62974]: DEBUG nova.network.neutron [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Refreshing network info cache for port 7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.406034] env[62974]: DEBUG nova.compute.manager [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Received event network-changed-8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 583.406034] env[62974]: DEBUG nova.compute.manager [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Refreshing instance network info cache due to event network-changed-8e95b6b2-a646-4f70-9191-7305ffd14c84. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 583.406034] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] Acquiring lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.406034] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] Acquired lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.406285] env[62974]: DEBUG nova.network.neutron [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Refreshing network info cache for port 8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.513842] env[62974]: DEBUG oslo_concurrency.lockutils [None req-000b50a0-d1d2-49a1-bbfa-8de16699ab39 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.094s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.522646] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 583.522816] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c2f75523-dfd5-4ca1-b349-49b15217ccc6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.531045] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 583.531045] env[62974]: value = "task-2653728" [ 583.531045] env[62974]: _type = "Task" [ 583.531045] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.547011] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653728, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.743380] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a90de6-8b30-3294-9327-970536f9eea2, 'name': SearchDatastore_Task, 'duration_secs': 0.023979} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.746383] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.746606] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 583.746833] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.746974] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.747189] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 583.748873] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cd31d65-626b-4c72-a01d-e9e316af4dec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.758743] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 583.758834] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 583.759585] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-611a1163-7f27-411f-8eb0-cabe9d8b7019 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.769000] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 583.769000] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5228c5ae-122a-1417-3581-7ac6fb66db5b" [ 583.769000] env[62974]: _type = "Task" [ 583.769000] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.781711] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5228c5ae-122a-1417-3581-7ac6fb66db5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.827335] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc703811-2583-4bd9-a720-57b744fd1f86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.836781] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5c9371-c110-4abd-9366-f4778e66a0fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.892980] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb1a4f8-6bc2-4ad6-93c2-2c5af5bd2919 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.904209] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db69d113-497e-4bbc-9497-f2af90974778 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.921536] env[62974]: DEBUG nova.compute.provider_tree [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.017340] env[62974]: DEBUG nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 584.047756] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653728, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.280211] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5228c5ae-122a-1417-3581-7ac6fb66db5b, 'name': SearchDatastore_Task, 'duration_secs': 0.012851} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.281609] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7ce8e81-f85e-4d6e-bdb4-71b2f863d610 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.286827] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 584.286827] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5225cd19-c2f8-7339-1bfe-0397f4d560ee" [ 584.286827] env[62974]: _type = "Task" [ 584.286827] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.294511] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5225cd19-c2f8-7339-1bfe-0397f4d560ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.304786] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.304963] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.305350] env[62974]: DEBUG nova.network.neutron [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 584.346996] env[62974]: DEBUG nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 584.376047] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 584.376400] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 584.376516] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 584.377382] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 584.377382] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 584.377495] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 584.377995] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 584.377995] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 584.377995] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 584.378139] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 584.378285] env[62974]: DEBUG nova.virt.hardware [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 584.379481] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12dc476-e44a-4156-a8dd-04df051789ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.387701] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f92e895-222f-4289-9606-abcae51b7551 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.393360] env[62974]: DEBUG nova.network.neutron [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Successfully created port: b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.427338] env[62974]: DEBUG nova.scheduler.client.report [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 584.545817] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653728, 'name': CreateSnapshot_Task, 'duration_secs': 1.011872} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.547777] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.547953] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 584.552532] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994a2b33-9a56-48bb-b015-8dd04f95fd18 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.565188] env[62974]: DEBUG nova.network.neutron [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Updated VIF entry in instance network info cache for port 8e95b6b2-a646-4f70-9191-7305ffd14c84. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 584.565509] env[62974]: DEBUG nova.network.neutron [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Updating instance_info_cache with network_info: [{"id": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "address": "fa:16:3e:cd:f7:77", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e95b6b2-a6", "ovs_interfaceid": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.645268] env[62974]: DEBUG nova.network.neutron [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updated VIF entry in instance network info cache for port 7e4b21ba-e0f2-4104-8f46-57871fd6ed16. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 584.646093] env[62974]: DEBUG nova.network.neutron [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [{"id": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "address": "fa:16:3e:34:87:aa", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b21ba-e0", "ovs_interfaceid": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.800549] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5225cd19-c2f8-7339-1bfe-0397f4d560ee, 'name': SearchDatastore_Task, 'duration_secs': 0.011527} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.800549] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.800549] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] f9adcd7e-58a0-433c-8602-cca814b84aaa/f9adcd7e-58a0-433c-8602-cca814b84aaa.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 584.801590] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-183e3041-7467-4a8b-868c-c7971299520f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.809650] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 584.809650] env[62974]: value = "task-2653729" [ 584.809650] env[62974]: _type = "Task" [ 584.809650] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.824775] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653729, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.930085] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.931757] env[62974]: DEBUG nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 584.937781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.438s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.939566] env[62974]: INFO nova.compute.claims [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.080737] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 585.082367] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] Releasing lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.083688] env[62974]: DEBUG nova.compute.manager [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Received event network-changed-c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 585.083688] env[62974]: DEBUG nova.compute.manager [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Refreshing instance network info cache due to event network-changed-c1dbf093-9abb-4c1d-a4bc-163058074d4f. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 585.083688] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] Acquiring lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.083688] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] Acquired lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.083917] env[62974]: DEBUG nova.network.neutron [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Refreshing network info cache for port c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 585.087053] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1b561153-ba66-484d-9918-b51a79c2425b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.100768] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 585.100768] env[62974]: value = "task-2653730" [ 585.100768] env[62974]: _type = "Task" [ 585.100768] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.115049] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653730, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.151428] env[62974]: DEBUG oslo_concurrency.lockutils [req-1083153b-248e-4eb9-b76d-3ae4e122551d req-eb7a7b7f-5fe4-4453-b152-61f8e57baf8f service nova] Releasing lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.324879] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653729, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.447069] env[62974]: DEBUG nova.compute.utils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 585.452052] env[62974]: DEBUG nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 585.454826] env[62974]: DEBUG nova.network.neutron [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 585.614784] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653730, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.798131] env[62974]: DEBUG nova.network.neutron [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [{"id": "947659a6-f0ce-4065-a591-6a15666e4ac5", "address": "fa:16:3e:f1:cd:d9", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947659a6-f0", "ovs_interfaceid": "947659a6-f0ce-4065-a591-6a15666e4ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.817118] env[62974]: DEBUG nova.policy [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf269a93e9d64b87a135c3e207ce1466', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13053a9449eb4b14a13ad720083975db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 585.827920] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642875} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.827920] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] f9adcd7e-58a0-433c-8602-cca814b84aaa/f9adcd7e-58a0-433c-8602-cca814b84aaa.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 585.827920] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 585.828341] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa3952a0-650d-4978-8d9d-8bc63d8c9a57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.838017] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 585.838017] env[62974]: value = "task-2653731" [ 585.838017] env[62974]: _type = "Task" [ 585.838017] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.854127] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.952950] env[62974]: DEBUG nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 586.112937] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653730, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.302813] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.353495] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068608} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.353786] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 586.355227] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2249b29d-16e8-4c4d-8a50-495f31160da4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.385828] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] f9adcd7e-58a0-433c-8602-cca814b84aaa/f9adcd7e-58a0-433c-8602-cca814b84aaa.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 586.386958] env[62974]: DEBUG nova.network.neutron [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updated VIF entry in instance network info cache for port c1dbf093-9abb-4c1d-a4bc-163058074d4f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 586.387667] env[62974]: DEBUG nova.network.neutron [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updating instance_info_cache with network_info: [{"id": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "address": "fa:16:3e:be:01:c4", "network": {"id": "f0afd336-13eb-49da-8643-c6a4c51451d7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-622006440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf9a2a44db94217bdd7652ef27b5737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1dbf093-9a", "ovs_interfaceid": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.391093] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0763e0d6-b51c-493e-807f-5fb7271770fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.427895] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 586.427895] env[62974]: value = "task-2653732" [ 586.427895] env[62974]: _type = "Task" [ 586.427895] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.437211] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653732, 'name': ReconfigVM_Task} progress is 6%. 
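The instance_info_cache entry logged above is a plain list of VIF dicts (network, subnets, ips, floating_ips). A small sketch of walking that structure to collect the fixed and floating addresses it carries; the function name is made up for illustration:

def collect_addresses(network_info):
    """Return ([fixed addresses], [floating addresses]) from a network_info list."""
    fixed, floating = [], []
    for vif in network_info:
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                fixed.append(ip['address'])
                for fip in ip.get('floating_ips', []):
                    floating.append(fip['address'])
    return fixed, floating

# For the cache entry above this yields (['192.168.128.8'], ['10.180.180.150']).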
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.492747] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01601296-9736-43c8-b69a-680aff595ae7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.502320] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b7fbd4-9e3b-4edd-8b5c-65bde29a344c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.537738] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039944a8-2c2d-4c66-b563-651d6be54d16 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.552229] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2266fe17-e33d-4884-9422-9b3282cbb900 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.558480] env[62974]: DEBUG nova.network.neutron [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Successfully created port: e786c602-63b4-4d89-80a2-b141043584eb {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.571042] env[62974]: DEBUG nova.compute.provider_tree [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.613682] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653730, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.837853] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4af066-8c9a-4826-a68c-a6cca412d293 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.862356] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bce2bf4-25d1-44f0-9aeb-c55f120285b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.870649] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance 'd8b7a39f-ec73-4a87-9b1e-9428ca72f895' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 586.890553] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f605f66-30b7-48bd-95f0-ede5ca881735 req-fddb6a24-ea22-47b7-84df-c3a7fc99d426 service nova] Releasing lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.937609] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653732, 'name': ReconfigVM_Task, 'duration_secs': 0.318913} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.937877] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Reconfigured VM instance instance-0000000f to attach disk [datastore1] f9adcd7e-58a0-433c-8602-cca814b84aaa/f9adcd7e-58a0-433c-8602-cca814b84aaa.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 586.938646] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ea8f082-9ef2-43fc-985b-2637618f3bcc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.944545] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 586.944545] env[62974]: value = "task-2653733" [ 586.944545] env[62974]: _type = "Task" [ 586.944545] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.953929] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653733, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.969322] env[62974]: DEBUG nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 586.997685] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 586.997935] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.998103] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 586.998338] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.998493] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 586.998644] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 586.998941] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 586.999428] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 586.999671] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 586.999726] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 586.999873] env[62974]: DEBUG nova.virt.hardware [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 587.000800] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b8649c-ff48-404f-b860-9930194295a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.010274] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36d1350-603f-45af-b858-5f75b1866a67 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.074849] env[62974]: DEBUG nova.scheduler.client.report [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 587.116780] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653730, 'name': CloneVM_Task} progress is 95%. 
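The hardware entries above enumerate CPU topologies for 1 vCPU under effectively unlimited socket/core/thread caps and end up with the single (1,1,1) topology. A toy enumeration in the same spirit, assuming a topology is any (sockets, cores, threads) triple whose product equals the vCPU count and respects the caps; this is a simplified model, not Nova's _get_possible_cpu_topologies():

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

# possible_topologies(1) -> [(1, 1, 1)], matching the single topology logged above.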
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.380160] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 587.380160] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f74483b2-d8e7-40f1-9d57-f8b524c61f3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.389270] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 587.389270] env[62974]: value = "task-2653734" [ 587.389270] env[62974]: _type = "Task" [ 587.389270] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.400967] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653734, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.454412] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653733, 'name': Rename_Task, 'duration_secs': 0.14684} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.454709] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 587.455604] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-def9c6e6-9cd5-4452-990f-2a5aa03f388e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.466302] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 587.466302] env[62974]: value = "task-2653735" [ 587.466302] env[62974]: _type = "Task" [ 587.466302] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.479137] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653735, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.583023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.583023] env[62974]: DEBUG nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 587.584726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.082s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.586362] env[62974]: INFO nova.compute.claims [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.619146] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653730, 'name': CloneVM_Task, 'duration_secs': 2.237529} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.619833] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Created linked-clone VM from snapshot [ 587.620731] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cf5860-c948-4f7c-81a6-4992afa66828 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.629597] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Uploading image e11d081a-50c5-4669-93e4-b37da91d7314 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 587.656457] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 587.656457] env[62974]: value = "vm-535244" [ 587.656457] env[62974]: _type = "VirtualMachine" [ 587.656457] env[62974]: }. 
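The "Claim successful on node" entry above follows inventory data of the form logged earlier (total, reserved, allocation_ratio per resource class). A sketch of how such a claim check can be modelled, under the assumption that usable capacity is (total - reserved) * allocation_ratio; the helper names are illustrative and this is not the placement service's actual accounting:

def capacity(inv):
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

def can_claim(inventory, usages, request):
    """Return True if every requested resource class still fits on the provider."""
    for rc, amount in request.items():
        if usages.get(rc, 0) + amount > capacity(inventory[rc]):
            return False
    return True

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
# An m1.nano-sized request (1 vCPU, 192 MB RAM, 1 GB disk) against an idle node:
print(can_claim(inventory, {}, {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}))  # True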
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 587.656731] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-66791836-d175-4ee5-8b71-6c4f00699298 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.668144] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lease: (returnval){ [ 587.668144] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269372d-ec5f-f541-f2e9-6f98d95e5992" [ 587.668144] env[62974]: _type = "HttpNfcLease" [ 587.668144] env[62974]: } obtained for exporting VM: (result){ [ 587.668144] env[62974]: value = "vm-535244" [ 587.668144] env[62974]: _type = "VirtualMachine" [ 587.668144] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 587.668144] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the lease: (returnval){ [ 587.668144] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269372d-ec5f-f541-f2e9-6f98d95e5992" [ 587.668144] env[62974]: _type = "HttpNfcLease" [ 587.668144] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 587.675130] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 587.675130] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269372d-ec5f-f541-f2e9-6f98d95e5992" [ 587.675130] env[62974]: _type = "HttpNfcLease" [ 587.675130] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 587.900815] env[62974]: DEBUG oslo_vmware.api [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2653734, 'name': PowerOnVM_Task, 'duration_secs': 0.428013} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.901113] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 587.901422] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e2021c-908a-4981-81c1-9e7104aca8fc tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance 'd8b7a39f-ec73-4a87-9b1e-9428ca72f895' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 587.979145] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653735, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.097188] env[62974]: DEBUG nova.compute.utils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.100285] env[62974]: DEBUG nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 588.100285] env[62974]: DEBUG nova.network.neutron [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 588.164051] env[62974]: DEBUG nova.policy [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e07ae60010640d88de0d3b716914186', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd914830aaf454e26b77cbb46722764ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 588.174222] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 588.174222] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269372d-ec5f-f541-f2e9-6f98d95e5992" [ 588.174222] env[62974]: _type = "HttpNfcLease" [ 588.174222] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 588.174798] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 588.174798] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269372d-ec5f-f541-f2e9-6f98d95e5992" [ 588.174798] env[62974]: _type = "HttpNfcLease" [ 588.174798] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 588.175538] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24555ff-d39d-465a-bdec-05fd0be497ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.185978] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5210fb3d-1b8c-ade7-323e-de062c22e6e3/disk-0.vmdk from lease info. 
{{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 588.186233] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5210fb3d-1b8c-ade7-323e-de062c22e6e3/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 588.402145] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-936ef7c6-3d18-4b2e-8ca3-37f797587a04 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.480813] env[62974]: DEBUG oslo_vmware.api [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2653735, 'name': PowerOnVM_Task, 'duration_secs': 0.695959} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.481417] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 588.485234] env[62974]: INFO nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Took 10.92 seconds to spawn the instance on the hypervisor. [ 588.485234] env[62974]: DEBUG nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 588.485234] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a3cac3-306d-40f9-be6d-d976c53395bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.486491] env[62974]: DEBUG nova.network.neutron [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Successfully updated port: b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 588.608771] env[62974]: DEBUG nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Start building block device mappings for instance. 
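The image upload above goes through an HttpNfcLease: the lease is requested via ExportVm, polled until it is ready, the disk-0.vmdk URL is read from the lease info, and HttpNfcLeaseProgress keep-alives are sent while the disk is streamed. A rough sketch of that flow; all four callables and the lease-info shape are hypothetical stand-ins for the real vSphere calls:

import time

def export_first_vmdk(get_lease_state, get_lease_info, report_progress, read_chunk,
                      poll_interval=1.0):
    # 1. Wait for the lease to leave the 'initializing' state.
    while get_lease_state() == 'initializing':
        time.sleep(poll_interval)
    if get_lease_state() != 'ready':
        raise RuntimeError('lease did not become ready')

    # 2. Pick the first device URL from the lease info
    #    (the entries above show a single disk-0.vmdk URL).
    info = get_lease_info()          # assumed: {'device_urls': [{'url': ...}], 'total_bytes': ...}
    url = info['device_urls'][0]['url']

    # 3. Stream the disk, reporting percentage progress so the server
    #    does not time the lease out.
    transferred = 0
    total = info.get('total_bytes', 0) or 1
    for chunk in read_chunk(url):
        transferred += len(chunk)
        report_progress(min(100, int(100 * transferred / total)))
    return transferred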
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 588.885175] env[62974]: DEBUG nova.network.neutron [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Successfully updated port: e786c602-63b4-4d89-80a2-b141043584eb {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 588.990017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.996832] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquired lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.996994] env[62974]: DEBUG nova.network.neutron [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.022112] env[62974]: DEBUG nova.network.neutron [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Successfully created port: b07f0ace-3474-4ef6-81c7-2959c86f0791 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.033964] env[62974]: INFO nova.compute.manager [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Took 26.99 seconds to build instance. 
[ 589.205039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33653605-324a-4632-b811-45d96081f14d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.215395] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d71a48-d4aa-4ccc-9662-036c81f0dc3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.256093] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3103c551-f4c6-47ae-b242-0a49069ee03a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.264355] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da4fbde-ed56-42c5-8cb7-f7b1927ea839 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.282969] env[62974]: DEBUG nova.compute.provider_tree [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.395320] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "refresh_cache-ecde0e49-c344-4003-b858-8312c1ac344f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.395576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired lock "refresh_cache-ecde0e49-c344-4003-b858-8312c1ac344f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.395654] env[62974]: DEBUG nova.network.neutron [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.538408] env[62974]: DEBUG oslo_concurrency.lockutils [None req-520f8a7a-59ab-48f4-b938-34f855551e7b tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.529s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.623058] env[62974]: DEBUG nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Start spawning the instance on the hypervisor. 
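The recurring Acquiring / Acquired / Releasing entries for locks such as "refresh_cache-<instance uuid>" and "compute_resources" follow a plain named-lock pattern: serialize work on a shared resource by name, do the work, release. A minimal sketch built on threading primitives; named_lock is a made-up helper, not oslo.concurrency's lockutils:

import threading
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()

@contextmanager
def named_lock(name):
    """Serialize callers that use the same lock name."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s"' % name)
    lock.acquire()
    print('Lock "%s" acquired' % name)
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released' % name)

# with named_lock('refresh_cache-586a3541-060f-4859-8507-17faa637b17e'):
#     ...rebuild the instance network info cache...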
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 589.654135] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 589.654596] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.654929] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 589.655201] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.655416] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 589.655658] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 589.655929] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 589.656161] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 589.659016] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Got 1 possible 
topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 589.659016] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 589.659016] env[62974]: DEBUG nova.virt.hardware [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 589.659016] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d370e836-86f3-4e77-b28d-d90a77f8fb52 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.669158] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d0716c-d839-46c6-9492-04119a201d13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.707158] env[62974]: DEBUG nova.network.neutron [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.786288] env[62974]: DEBUG nova.scheduler.client.report [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 589.871392] env[62974]: DEBUG nova.compute.manager [req-0fbb3424-c2cb-4181-b07f-332abc50a98e req-5e4d6942-1377-4ef5-b2c6-d4955b130e78 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received event network-vif-plugged-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 589.874153] env[62974]: DEBUG oslo_concurrency.lockutils [req-0fbb3424-c2cb-4181-b07f-332abc50a98e req-5e4d6942-1377-4ef5-b2c6-d4955b130e78 service nova] Acquiring lock "586a3541-060f-4859-8507-17faa637b17e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.874153] env[62974]: DEBUG oslo_concurrency.lockutils [req-0fbb3424-c2cb-4181-b07f-332abc50a98e req-5e4d6942-1377-4ef5-b2c6-d4955b130e78 service nova] Lock "586a3541-060f-4859-8507-17faa637b17e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.874153] env[62974]: DEBUG oslo_concurrency.lockutils [req-0fbb3424-c2cb-4181-b07f-332abc50a98e req-5e4d6942-1377-4ef5-b2c6-d4955b130e78 service nova] Lock "586a3541-060f-4859-8507-17faa637b17e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.874153] env[62974]: DEBUG nova.compute.manager [req-0fbb3424-c2cb-4181-b07f-332abc50a98e req-5e4d6942-1377-4ef5-b2c6-d4955b130e78 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] No waiting events found dispatching network-vif-plugged-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 589.874153] env[62974]: WARNING nova.compute.manager [req-0fbb3424-c2cb-4181-b07f-332abc50a98e req-5e4d6942-1377-4ef5-b2c6-d4955b130e78 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received unexpected event network-vif-plugged-b8fd7c55-6daa-4314-8b00-89aea7879581 for instance with vm_state building and task_state spawning. [ 589.967419] env[62974]: DEBUG nova.network.neutron [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.047797] env[62974]: DEBUG nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Starting instance... 
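The event handling above shows both sides of the instance-event handshake: a network-vif-plugged event arrives from the network service, _pop_event looks for a registered waiter, and because none was prepared the manager logs the "Received unexpected event" warning. A toy model of that registry, not Nova's InstanceEvents class:

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for(self, instance_uuid, event_name):
        """Register interest in an event before triggering the action that causes it."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        """Deliver an external event; return True if a waiter was registered."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('WARNING: unexpected event %s for %s' % (event_name, instance_uuid))
            return False
        ev.set()
        return True

# events = InstanceEvents()
# events.pop_instance_event('586a3541-060f-4859-8507-17faa637b17e',
#                           'network-vif-plugged-b8fd7c55-6daa-4314-8b00-89aea7879581')
# -> False, reported as unexpected, matching the warning logged above.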
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 590.165176] env[62974]: DEBUG nova.network.neutron [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.296182] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.298050] env[62974]: DEBUG nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 590.299415] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.380s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.302658] env[62974]: INFO nova.compute.claims [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.399602] env[62974]: DEBUG nova.network.neutron [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Updating instance_info_cache with network_info: [{"id": "e786c602-63b4-4d89-80a2-b141043584eb", "address": "fa:16:3e:3b:29:47", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape786c602-63", "ovs_interfaceid": "e786c602-63b4-4d89-80a2-b141043584eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.507898] env[62974]: DEBUG nova.compute.manager [req-c1670048-c9b0-439a-b8fa-84336b8d4bf0 req-7951dd17-b186-4f07-9d23-1d0332fa45b0 service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Received event network-vif-plugged-e786c602-63b4-4d89-80a2-b141043584eb {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 590.507898] env[62974]: DEBUG oslo_concurrency.lockutils [req-c1670048-c9b0-439a-b8fa-84336b8d4bf0 req-7951dd17-b186-4f07-9d23-1d0332fa45b0 service nova] Acquiring lock "ecde0e49-c344-4003-b858-8312c1ac344f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.507898] env[62974]: DEBUG oslo_concurrency.lockutils [req-c1670048-c9b0-439a-b8fa-84336b8d4bf0 req-7951dd17-b186-4f07-9d23-1d0332fa45b0 service nova] Lock "ecde0e49-c344-4003-b858-8312c1ac344f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.507898] 
env[62974]: DEBUG oslo_concurrency.lockutils [req-c1670048-c9b0-439a-b8fa-84336b8d4bf0 req-7951dd17-b186-4f07-9d23-1d0332fa45b0 service nova] Lock "ecde0e49-c344-4003-b858-8312c1ac344f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.507898] env[62974]: DEBUG nova.compute.manager [req-c1670048-c9b0-439a-b8fa-84336b8d4bf0 req-7951dd17-b186-4f07-9d23-1d0332fa45b0 service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] No waiting events found dispatching network-vif-plugged-e786c602-63b4-4d89-80a2-b141043584eb {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 590.508690] env[62974]: WARNING nova.compute.manager [req-c1670048-c9b0-439a-b8fa-84336b8d4bf0 req-7951dd17-b186-4f07-9d23-1d0332fa45b0 service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Received unexpected event network-vif-plugged-e786c602-63b4-4d89-80a2-b141043584eb for instance with vm_state building and task_state spawning. [ 590.583850] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.667548] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Releasing lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.667873] env[62974]: DEBUG nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Instance network_info: |[{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 590.671644] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:39:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8fd7c55-6daa-4314-8b00-89aea7879581', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.679873] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Creating folder: Project (57827584df3c485ca936672aebb4c992). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.680251] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-521c4002-075f-4a24-85e2-31f6737d05a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.692597] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Created folder: Project (57827584df3c485ca936672aebb4c992) in parent group-v535199. [ 590.692951] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Creating folder: Instances. Parent ref: group-v535245. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.693284] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfb4184c-9308-41e0-8928-208ce8cd04b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.704315] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Created folder: Instances in parent group-v535245. [ 590.704821] env[62974]: DEBUG oslo.service.loopingcall [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.705207] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 586a3541-060f-4859-8507-17faa637b17e] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 590.705761] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dda84a9e-3108-435d-9fc7-07bb90bf3d6c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.736041] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.736041] env[62974]: value = "task-2653739" [ 590.736041] env[62974]: _type = "Task" [ 590.736041] env[62974]: } to complete. 
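The "Instance VIF info" entry above is derived from the same network_info structure cached earlier: the MAC address, the port id, and the NSX logical switch id from the port details. A sketch of that mapping, assuming the field names follow the logged dicts; this is illustrative, not Nova's actual VIF translation code:

def vif_info_from_network_info(vif, bridge='br-int'):
    """Map one cached network_info entry to the VIF info shape seen in the log."""
    details = vif.get('details', {})
    return {
        'network_name': bridge,
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }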
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.748762] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653739, 'name': CreateVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.807259] env[62974]: DEBUG nova.compute.utils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.816113] env[62974]: DEBUG nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 590.816316] env[62974]: DEBUG nova.network.neutron [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.902595] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Releasing lock "refresh_cache-ecde0e49-c344-4003-b858-8312c1ac344f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.905200] env[62974]: DEBUG nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Instance network_info: |[{"id": "e786c602-63b4-4d89-80a2-b141043584eb", "address": "fa:16:3e:3b:29:47", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape786c602-63", "ovs_interfaceid": "e786c602-63b4-4d89-80a2-b141043584eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 590.906406] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:3b:29:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e786c602-63b4-4d89-80a2-b141043584eb', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.930122] env[62974]: DEBUG oslo.service.loopingcall [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.930666] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 590.931082] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8bcc8eb-bba4-4af8-9e91-838abfbf0175 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.962929] env[62974]: DEBUG nova.policy [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85705a53f9314b08aed10199854f0d2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc2dc33e40e549d1a025e4b883c4dfb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 590.970846] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.970846] env[62974]: value = "task-2653740" [ 590.970846] env[62974]: _type = "Task" [ 590.970846] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.981923] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653740, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.250366] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653739, 'name': CreateVM_Task, 'duration_secs': 0.400866} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.250554] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 586a3541-060f-4859-8507-17faa637b17e] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 591.251292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.251445] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.251770] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 591.252050] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c0954d2-4156-49fd-a2b9-8f3c60e33395 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.265273] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 591.265273] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5274efa7-81da-34c9-c027-abc6e84b4b2b" [ 591.265273] env[62974]: _type = "Task" [ 591.265273] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.278994] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5274efa7-81da-34c9-c027-abc6e84b4b2b, 'name': SearchDatastore_Task, 'duration_secs': 0.012415} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.279383] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.279630] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.279894] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.280062] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.280267] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.280571] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a185d289-392f-4dc0-bb5b-bd9792868a08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.291259] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.291461] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 591.292306] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-107114fd-6625-4ee0-80ab-4abba8b21a1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.298343] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 591.298343] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522f9c75-9e80-408a-afed-d19423c15ba7" [ 591.298343] env[62974]: _type = "Task" [ 591.298343] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.310262] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522f9c75-9e80-408a-afed-d19423c15ba7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.317099] env[62974]: DEBUG nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 591.360229] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "a7a014b9-10e1-45a0-85da-4754051e8d82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.360648] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.488251] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653740, 'name': CreateVM_Task, 'duration_secs': 0.417192} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.488251] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 591.489388] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.489488] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.490539] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 591.490539] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e168f63a-6331-4219-b548-b2949893fcb4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.500331] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 591.500331] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5259f20d-4755-8cf7-d31f-bcf4e047db5a" [ 591.500331] env[62974]: _type = "Task" [ 591.500331] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.512174] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5259f20d-4755-8cf7-d31f-bcf4e047db5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.642203] env[62974]: DEBUG nova.network.neutron [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Successfully updated port: b07f0ace-3474-4ef6-81c7-2959c86f0791 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 591.810017] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522f9c75-9e80-408a-afed-d19423c15ba7, 'name': SearchDatastore_Task, 'duration_secs': 0.012337} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.813895] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ba2e75-6084-4133-8608-b453989150d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.821747] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 591.821747] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5216a476-aeac-aed8-fa73-5e79baeae742" [ 591.821747] env[62974]: _type = "Task" [ 591.821747] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.842560] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5216a476-aeac-aed8-fa73-5e79baeae742, 'name': SearchDatastore_Task, 'duration_secs': 0.014612} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.842560] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.842764] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 586a3541-060f-4859-8507-17faa637b17e/586a3541-060f-4859-8507-17faa637b17e.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 591.843202] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e731d9f-208d-4ddd-a90c-63bd3f45ea1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.851634] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4a54f2-097d-41e7-b049-0d3f6b7f0546 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.857165] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 591.857165] env[62974]: value = "task-2653741" [ 591.857165] env[62974]: _type = "Task" [ 591.857165] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.864676] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91332ae-ae19-4b61-815b-37b5fec58113 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.871395] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653741, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.901570] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef426141-16c8-4d17-9c35-6146897619f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.910641] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a4f7ee-e7bd-4582-b4cf-0b6a3cacbb7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.926914] env[62974]: DEBUG nova.compute.provider_tree [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.012713] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5259f20d-4755-8cf7-d31f-bcf4e047db5a, 'name': SearchDatastore_Task, 'duration_secs': 0.021334} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.014395] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.018113] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.018113] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.018113] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.018113] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.018113] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a445ec2b-4bf8-430a-ada0-03085a893312 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.030018] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.030283] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.031107] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cf94013-548b-432e-ba00-9e091a77e583 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.043983] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 592.043983] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524aea67-07b5-489d-c596-911e3f9077ac" [ 592.043983] env[62974]: _type = "Task" [ 592.043983] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.054596] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524aea67-07b5-489d-c596-911e3f9077ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.145294] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "refresh_cache-6dc914e9-bce5-4a19-a919-ae94981ea800" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.145449] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "refresh_cache-6dc914e9-bce5-4a19-a919-ae94981ea800" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.145506] env[62974]: DEBUG nova.network.neutron [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 592.337041] env[62974]: DEBUG nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 592.348088] env[62974]: DEBUG nova.network.neutron [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Successfully created port: f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 592.374290] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653741, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.378379] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 592.378379] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.379191] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 592.379657] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.379657] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 592.379780] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 592.379977] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 592.380168] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 592.380383] env[62974]: DEBUG nova.virt.hardware [None 
req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 592.380555] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 592.380885] env[62974]: DEBUG nova.virt.hardware [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 592.382141] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ec8663-60d7-4220-8f42-347c71b33a53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.391124] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ab0b2c-baa2-4457-9a1d-5cc8a54a71dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.431309] env[62974]: DEBUG nova.scheduler.client.report [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.546688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.546688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.546688] env[62974]: DEBUG nova.compute.manager [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Going to confirm migration 1 {{(pid=62974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 592.567181] env[62974]: DEBUG oslo_vmware.api [None 
req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524aea67-07b5-489d-c596-911e3f9077ac, 'name': SearchDatastore_Task, 'duration_secs': 0.013593} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.569033] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e481a34e-fc97-4a68-8dd6-be00b80f333b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.575970] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 592.575970] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520cdc9a-8699-56d4-a39b-5be41c480e72" [ 592.575970] env[62974]: _type = "Task" [ 592.575970] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.588733] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520cdc9a-8699-56d4-a39b-5be41c480e72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.696227] env[62974]: DEBUG nova.network.neutron [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.869138] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566782} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.869910] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 586a3541-060f-4859-8507-17faa637b17e/586a3541-060f-4859-8507-17faa637b17e.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 592.870921] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 592.870921] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95be6663-a308-4470-aaf9-d4c70787c7ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.878997] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 592.878997] env[62974]: value = "task-2653742" [ 592.878997] env[62974]: _type = "Task" [ 592.878997] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.892033] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653742, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.918227] env[62974]: DEBUG nova.network.neutron [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Updating instance_info_cache with network_info: [{"id": "b07f0ace-3474-4ef6-81c7-2959c86f0791", "address": "fa:16:3e:73:f8:ad", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb07f0ace-34", "ovs_interfaceid": "b07f0ace-3474-4ef6-81c7-2959c86f0791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.934976] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.935453] env[62974]: DEBUG nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 592.938187] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.923s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.938653] env[62974]: DEBUG nova.objects.instance [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lazy-loading 'resources' on Instance uuid 2313468e-820f-4fff-bdeb-5d542c94584d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 593.089934] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520cdc9a-8699-56d4-a39b-5be41c480e72, 'name': SearchDatastore_Task, 'duration_secs': 0.013775} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.090695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.090695] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] ecde0e49-c344-4003-b858-8312c1ac344f/ecde0e49-c344-4003-b858-8312c1ac344f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 593.090903] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19540715-b5ef-4e24-adb5-8942810cd815 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.104868] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 593.104868] env[62974]: value = "task-2653743" [ 593.104868] env[62974]: _type = "Task" [ 593.104868] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.121960] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653743, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.174637] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.175074] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.175074] env[62974]: DEBUG nova.network.neutron [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.175215] env[62974]: DEBUG nova.objects.instance [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lazy-loading 'info_cache' on Instance uuid d8b7a39f-ec73-4a87-9b1e-9428ca72f895 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 593.392082] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653742, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072508} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.393160] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 593.393160] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c334dd-0259-476d-8081-78381e43f6b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.419430] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 586a3541-060f-4859-8507-17faa637b17e/586a3541-060f-4859-8507-17faa637b17e.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 593.419993] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67fd0b50-4d2b-428f-8ea4-394789edad9d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.440702] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "refresh_cache-6dc914e9-bce5-4a19-a919-ae94981ea800" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.440702] env[62974]: DEBUG nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Instance network_info: |[{"id": "b07f0ace-3474-4ef6-81c7-2959c86f0791", "address": "fa:16:3e:73:f8:ad", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb07f0ace-34", "ovs_interfaceid": "b07f0ace-3474-4ef6-81c7-2959c86f0791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 593.446025] env[62974]: DEBUG nova.compute.utils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e 
tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.446983] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:f8:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06cc7c49-c46c-4c1e-bf51-77e9ea802c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b07f0ace-3474-4ef6-81c7-2959c86f0791', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.457537] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating folder: Project (d914830aaf454e26b77cbb46722764ba). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.458514] env[62974]: DEBUG nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 593.458753] env[62974]: DEBUG nova.network.neutron [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.461123] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-739b4ccb-cf53-433f-9327-e817f1e2f5b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.466988] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 593.466988] env[62974]: value = "task-2653744" [ 593.466988] env[62974]: _type = "Task" [ 593.466988] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.481627] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653744, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.484081] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created folder: Project (d914830aaf454e26b77cbb46722764ba) in parent group-v535199. [ 593.484081] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating folder: Instances. 
Parent ref: group-v535249. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.484081] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16a26b57-ceb3-47dc-b923-38cb7beb6614 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.498486] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created folder: Instances in parent group-v535249. [ 593.498486] env[62974]: DEBUG oslo.service.loopingcall [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.498486] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 593.498486] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f92a68ae-9c23-4707-b6a9-d879721c62e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.524175] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.524175] env[62974]: value = "task-2653747" [ 593.524175] env[62974]: _type = "Task" [ 593.524175] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.534225] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653747, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.536048] env[62974]: DEBUG nova.policy [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84861fd0e88640529eb573045514dff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39e59f58f7c24529bfce4bcc18cc7925', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 593.619546] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653743, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.952077] env[62974]: DEBUG nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 593.953196] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ffcad3-0ff0-4610-b0ee-c8c3d46922d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.964209] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc47a2a-2960-45de-acd3-751877e45306 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.005539] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66ab0cb-927c-4a0d-b7f3-a8a2da24e693 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.010301] env[62974]: DEBUG nova.compute.manager [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 594.010301] env[62974]: DEBUG nova.compute.manager [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing instance network info cache due to event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 594.010301] env[62974]: DEBUG oslo_concurrency.lockutils [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] Acquiring lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.010301] env[62974]: DEBUG oslo_concurrency.lockutils [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] Acquired lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.010562] env[62974]: DEBUG nova.network.neutron [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 594.018941] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653744, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.023281] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03bcea0-dbe0-4dd3-8848-371cb53accd3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.048136] env[62974]: DEBUG nova.compute.provider_tree [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.054274] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653747, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.115032] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653743, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640417} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.115315] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] ecde0e49-c344-4003-b858-8312c1ac344f/ecde0e49-c344-4003-b858-8312c1ac344f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 594.115574] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 594.115779] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fafe5320-8fd0-4763-8ea0-271d9d9d79d4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.123383] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 594.123383] env[62974]: value = "task-2653748" [ 594.123383] env[62974]: _type = "Task" [ 594.123383] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.135785] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.138483] env[62974]: DEBUG nova.network.neutron [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Successfully created port: a9d97dbe-61b9-4710-a3f6-ef2caed51d6b {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.496372] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653744, 'name': ReconfigVM_Task, 'duration_secs': 0.667946} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.496725] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 586a3541-060f-4859-8507-17faa637b17e/586a3541-060f-4859-8507-17faa637b17e.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 594.497436] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c45c7a8-af91-4e58-8c00-daa769398b1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.503887] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 594.503887] env[62974]: value = "task-2653749" [ 594.503887] env[62974]: _type = "Task" [ 594.503887] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.519461] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653749, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.541114] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653747, 'name': CreateVM_Task, 'duration_secs': 0.926272} completed successfully. 
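Editor's note: the CreateVM_Task, ReconfigVM_Task and ExtendVirtualDisk_Task entries above all follow the same wait-and-poll cycle — submit a vCenter task, then poll its state and log progress until it reports success or error. A minimal, self-contained sketch of that loop in plain Python (stubbed TaskInfo and poll callable; this is not the actual oslo.vmware helper) looks roughly like this:

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        """Stand-in for the vSphere TaskInfo structure (state + progress)."""
        state: str          # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0

    def wait_for_task(poll_fn, interval=0.5):
        """Poll a task until it reaches a terminal state, logging progress in between."""
        while True:
            info = poll_fn()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError("task failed")
            print(f"progress is {info.progress}%")
            time.sleep(interval)

    # Toy usage: a fake task that finishes on the third poll.
    _states = iter([TaskInfo("running", 10), TaskInfo("running", 66), TaskInfo("success", 100)])
    print(wait_for_task(lambda: next(_states), interval=0.01))
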
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.541364] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 594.542938] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.542938] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.542938] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 594.543625] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecaae172-87ba-474f-b35b-b61132ddabdc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.550476] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 594.550476] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dcf82c-3b75-7afd-0eb2-83b899b7bb10" [ 594.550476] env[62974]: _type = "Task" [ 594.550476] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.565715] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dcf82c-3b75-7afd-0eb2-83b899b7bb10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.579445] env[62974]: ERROR nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [req-34d4be8b-776b-448d-9840-fc7b964d1ccf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-34d4be8b-776b-448d-9840-fc7b964d1ccf"}]} [ 594.597034] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 594.613102] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 594.613361] env[62974]: DEBUG nova.compute.provider_tree [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.630518] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 594.638014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.638270] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.642722] env[62974]: DEBUG oslo_vmware.api 
[None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067781} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.643162] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 594.644089] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc197308-b4e8-46e9-900a-5b1daca5bd45 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.669964] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] ecde0e49-c344-4003-b858-8312c1ac344f/ecde0e49-c344-4003-b858-8312c1ac344f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 594.671275] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 594.673624] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdccb782-59b6-4718-b53a-0ff6fb71f4bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.702976] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 594.702976] env[62974]: value = "task-2653750" [ 594.702976] env[62974]: _type = "Task" [ 594.702976] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.711825] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653750, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.941221] env[62974]: DEBUG nova.network.neutron [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Successfully updated port: f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 594.950617] env[62974]: DEBUG nova.compute.manager [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Received event network-changed-e786c602-63b4-4d89-80a2-b141043584eb {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 594.951597] env[62974]: DEBUG nova.compute.manager [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Refreshing instance network info cache due to event network-changed-e786c602-63b4-4d89-80a2-b141043584eb. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 594.951597] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Acquiring lock "refresh_cache-ecde0e49-c344-4003-b858-8312c1ac344f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.951597] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Acquired lock "refresh_cache-ecde0e49-c344-4003-b858-8312c1ac344f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.951597] env[62974]: DEBUG nova.network.neutron [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Refreshing network info cache for port e786c602-63b4-4d89-80a2-b141043584eb {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 594.976608] env[62974]: DEBUG nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 595.019472] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 595.019472] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.019472] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 595.019626] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.019626] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 595.019626] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 595.019626] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 595.019626] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 595.019801] env[62974]: DEBUG 
nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 595.019801] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 595.019801] env[62974]: DEBUG nova.virt.hardware [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 595.019936] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331b5403-c329-4d1c-842b-fd0842d1ff96 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.037804] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.038106] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.038566] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653749, 'name': Rename_Task, 'duration_secs': 0.221024} completed successfully. 
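Editor's note: the nova.virt.hardware entries above show the driver enumerating CPU topologies for the 1-vCPU m1.nano flavor (limits 65536:65536:65536) and ending up with the single option 1:1:1. A simplified sketch of that enumeration — a hypothetical possible_topologies helper, not Nova's actual implementation — is:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorizations that exactly cover the vCPU count."""
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rest = vcpus // sockets
            for cores in range(1, min(rest, max_cores) + 1):
                if rest % cores:
                    continue
                threads = rest // cores
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # only VirtCPUTopology(sockets=1, cores=1, threads=1)
    print(possible_topologies(4))   # several factorizations for a 4-vCPU flavor
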
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.039612] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e036259-84f8-4f64-bbd7-f19f87e5f44f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.046852] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 595.050603] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3163db2-bbe5-4d69-9b51-9dcd102f3f0c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.073075] env[62974]: DEBUG nova.network.neutron [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [{"id": "947659a6-f0ce-4065-a591-6a15666e4ac5", "address": "fa:16:3e:f1:cd:d9", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947659a6-f0", "ovs_interfaceid": "947659a6-f0ce-4065-a591-6a15666e4ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.076783] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 595.076783] env[62974]: value = "task-2653751" [ 595.076783] env[62974]: _type = "Task" [ 595.076783] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.084103] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dcf82c-3b75-7afd-0eb2-83b899b7bb10, 'name': SearchDatastore_Task, 'duration_secs': 0.014904} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.084611] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.084726] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 595.084925] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.085083] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.085263] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 595.085982] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3df2e437-ab38-42ca-bbd0-6ceed9575c42 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.091805] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653751, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.100410] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "5bc466fb-eebb-40b1-ba09-614a25782ecd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.100466] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "5bc466fb-eebb-40b1-ba09-614a25782ecd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.103216] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 595.103216] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 595.103788] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-906a538a-321b-4a55-8daa-ab5142b28a2e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.113595] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 595.113595] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eaccbb-b627-b3a1-7816-6adcd225d68d" [ 595.113595] env[62974]: _type = "Task" [ 595.113595] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.128497] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eaccbb-b627-b3a1-7816-6adcd225d68d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.192249] env[62974]: DEBUG nova.network.neutron [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updated VIF entry in instance network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 595.192631] env[62974]: DEBUG nova.network.neutron [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.217259] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653750, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.275479] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e296853-c86f-461a-b0d7-affd31842bbb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.284937] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d14e8e-bc00-4e82-b8f0-0d5bd41f064a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.319641] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c2c391-6783-4818-aaa4-4645ff50c1a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.329678] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4511b89-3a80-4ecb-a0a7-a65fe2a382f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.343919] env[62974]: DEBUG nova.compute.provider_tree [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 595.446916] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.446916] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.446916] env[62974]: DEBUG nova.network.neutron [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 595.484389] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5210fb3d-1b8c-ade7-323e-de062c22e6e3/disk-0.vmdk. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 595.484694] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db04506b-61ce-447d-b1ed-6783df4af591 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.492688] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5210fb3d-1b8c-ade7-323e-de062c22e6e3/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 595.492863] env[62974]: ERROR oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5210fb3d-1b8c-ade7-323e-de062c22e6e3/disk-0.vmdk due to incomplete transfer. [ 595.493102] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4c78d87c-eb67-4ffc-80d4-916bfd999e7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.502729] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5210fb3d-1b8c-ade7-323e-de062c22e6e3/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 595.502923] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Uploaded image e11d081a-50c5-4669-93e4-b37da91d7314 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 595.505664] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 595.505664] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-406d5e2c-e119-4426-befd-5f1452d20d03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.513022] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 595.513022] env[62974]: value = "task-2653752" [ 595.513022] env[62974]: _type = "Task" [ 595.513022] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.520475] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653752, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.578690] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.580197] env[62974]: DEBUG nova.objects.instance [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lazy-loading 'migration_context' on Instance uuid d8b7a39f-ec73-4a87-9b1e-9428ca72f895 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 595.598825] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653751, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.626521] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eaccbb-b627-b3a1-7816-6adcd225d68d, 'name': SearchDatastore_Task, 'duration_secs': 0.015388} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.627952] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77fe7553-d50b-4a91-a213-5b2a5db3d078 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.633307] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 595.633307] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52931253-5b63-7712-e408-53f3e7a54787" [ 595.633307] env[62974]: _type = "Task" [ 595.633307] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.641505] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52931253-5b63-7712-e408-53f3e7a54787, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.686238] env[62974]: DEBUG nova.network.neutron [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Updated VIF entry in instance network info cache for port e786c602-63b4-4d89-80a2-b141043584eb. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 595.686238] env[62974]: DEBUG nova.network.neutron [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Updating instance_info_cache with network_info: [{"id": "e786c602-63b4-4d89-80a2-b141043584eb", "address": "fa:16:3e:3b:29:47", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape786c602-63", "ovs_interfaceid": "e786c602-63b4-4d89-80a2-b141043584eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.698172] env[62974]: DEBUG oslo_concurrency.lockutils [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] Releasing lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.698453] env[62974]: DEBUG nova.compute.manager [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Received event network-changed-47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 595.698664] env[62974]: DEBUG nova.compute.manager [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Refreshing instance network info cache due to event network-changed-47b61932-1b0f-4b88-9565-96bf61bb3912. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 595.698887] env[62974]: DEBUG oslo_concurrency.lockutils [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] Acquiring lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.699109] env[62974]: DEBUG oslo_concurrency.lockutils [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] Acquired lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.699284] env[62974]: DEBUG nova.network.neutron [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Refreshing network info cache for port 47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.716987] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653750, 'name': ReconfigVM_Task, 'duration_secs': 0.527673} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.717220] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Reconfigured VM instance instance-00000011 to attach disk [datastore1] ecde0e49-c344-4003-b858-8312c1ac344f/ecde0e49-c344-4003-b858-8312c1ac344f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 595.717922] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6e7b019-032f-4beb-b53e-7c09bad3b0b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.725779] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 595.725779] env[62974]: value = "task-2653753" [ 595.725779] env[62974]: _type = "Task" [ 595.725779] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.733941] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653753, 'name': Rename_Task} progress is 5%. 
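Editor's note: each network-changed event in this stretch triggers the same sequence — acquire the instance's "refresh_cache-<uuid>" lock, re-query the port, and update the cached VIF entry before releasing the lock. A toy sketch of that pattern (illustrative names and a stubbed port lookup; not Nova's or Neutron's API) might be:

    import threading
    from collections import defaultdict

    # One lock per "refresh_cache-<instance-uuid>" key, mirroring the named locks in the log.
    _cache_locks = defaultdict(threading.Lock)
    _nw_info_cache = {}

    def handle_network_changed(instance_uuid, port_id, fetch_port_info):
        """On a network-changed-<port> event, refresh that instance's cached VIF entry."""
        with _cache_locks[f"refresh_cache-{instance_uuid}"]:
            cache = _nw_info_cache.setdefault(instance_uuid, {})
            cache[port_id] = fetch_port_info(port_id)   # stand-in for the Neutron port query
            return cache

    # Toy usage with a stubbed lookup.
    print(handle_network_changed("586a3541", "b8fd7c55",
                                 lambda pid: {"id": pid, "status": "ACTIVE"}))
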
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.869459] env[62974]: ERROR nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] [req-b103412c-f526-4f65-ba1d-097f520aa69b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b103412c-f526-4f65-ba1d-097f520aa69b"}]} [ 595.888880] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 595.907770] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 595.908064] env[62974]: DEBUG nova.compute.provider_tree [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 595.924715] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 595.948012] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 
tempest-ServerDiagnosticsTest-979988022-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 595.993245] env[62974]: DEBUG nova.network.neutron [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.010490] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "6e81e765-4fe3-42a7-a0ba-9860be897a70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.010798] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "6e81e765-4fe3-42a7-a0ba-9860be897a70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.024607] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653752, 'name': Destroy_Task, 'duration_secs': 0.349066} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.025155] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Destroyed the VM [ 596.025155] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 596.025640] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7fc094a7-2248-45fa-bdb9-d14cb7a2fae8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.033511] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 596.033511] env[62974]: value = "task-2653754" [ 596.033511] env[62974]: _type = "Task" [ 596.033511] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.047048] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653754, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.097025] env[62974]: DEBUG nova.objects.base [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 596.097025] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653751, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.097025] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4220d95-9acf-420f-924b-d354a8ed4cc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.125830] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a61ee2a-ec27-4932-8474-1c7c95efccd4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.130054] env[62974]: DEBUG oslo_vmware.api [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 596.130054] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5203bf97-5088-3386-c317-229954dde044" [ 596.130054] env[62974]: _type = "Task" [ 596.130054] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.140095] env[62974]: DEBUG oslo_vmware.api [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5203bf97-5088-3386-c317-229954dde044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.150996] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52931253-5b63-7712-e408-53f3e7a54787, 'name': SearchDatastore_Task, 'duration_secs': 0.032286} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.151554] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.151817] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6dc914e9-bce5-4a19-a919-ae94981ea800/6dc914e9-bce5-4a19-a919-ae94981ea800.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.152092] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e66b4e5b-f2e4-498d-adc5-2da4d8de1858 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.159931] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 596.159931] env[62974]: value = "task-2653755" [ 596.159931] env[62974]: _type = "Task" [ 596.159931] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.169125] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653755, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.191822] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Releasing lock "refresh_cache-ecde0e49-c344-4003-b858-8312c1ac344f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.192251] env[62974]: DEBUG nova.compute.manager [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Received event network-vif-plugged-b07f0ace-3474-4ef6-81c7-2959c86f0791 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 596.192534] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Acquiring lock "6dc914e9-bce5-4a19-a919-ae94981ea800-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.192770] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.192952] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.193159] env[62974]: DEBUG nova.compute.manager [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] No waiting events found dispatching network-vif-plugged-b07f0ace-3474-4ef6-81c7-2959c86f0791 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 596.193342] env[62974]: WARNING nova.compute.manager [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Received unexpected event network-vif-plugged-b07f0ace-3474-4ef6-81c7-2959c86f0791 for instance with vm_state building and task_state spawning. [ 596.193513] env[62974]: DEBUG nova.compute.manager [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Received event network-changed-b07f0ace-3474-4ef6-81c7-2959c86f0791 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 596.193670] env[62974]: DEBUG nova.compute.manager [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Refreshing instance network info cache due to event network-changed-b07f0ace-3474-4ef6-81c7-2959c86f0791. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 596.193866] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Acquiring lock "refresh_cache-6dc914e9-bce5-4a19-a919-ae94981ea800" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.194014] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Acquired lock "refresh_cache-6dc914e9-bce5-4a19-a919-ae94981ea800" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.194187] env[62974]: DEBUG nova.network.neutron [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Refreshing network info cache for port b07f0ace-3474-4ef6-81c7-2959c86f0791 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 596.242189] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653753, 'name': Rename_Task, 'duration_secs': 0.232911} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.242640] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 596.243213] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb3deb91-9a6d-46b4-82f0-b0d977f5febf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.251922] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 596.251922] env[62974]: value = "task-2653756" [ 596.251922] env[62974]: _type = "Task" [ 596.251922] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.260931] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653756, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.262166] env[62974]: DEBUG nova.network.neutron [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Updating instance_info_cache with network_info: [{"id": "f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf", "address": "fa:16:3e:e4:2d:ad", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b6e9bd-89", "ovs_interfaceid": "f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.545885] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653754, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.570025] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b39fc65-25cd-4760-845c-3fc1b7cb931a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.575784] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2676ee7-7b89-48db-84da-8d27bfda2c5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.626611] env[62974]: DEBUG oslo_vmware.api [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653751, 'name': PowerOnVM_Task, 'duration_secs': 1.229873} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.627447] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d8e339-8045-4bc9-8276-6b0dbff611e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.630526] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 596.630763] env[62974]: INFO nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Took 12.28 seconds to spawn the instance on the hypervisor. [ 596.630991] env[62974]: DEBUG nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 596.631899] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c0879e-a2f8-4d10-9bbb-5a52baed48b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.648814] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e83418e-283c-4ed1-b53d-d050e3796073 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.654976] env[62974]: DEBUG oslo_vmware.api [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5203bf97-5088-3386-c317-229954dde044, 'name': SearchDatastore_Task, 'duration_secs': 0.012386} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.660090] env[62974]: DEBUG nova.network.neutron [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updated VIF entry in instance network info cache for port 47b61932-1b0f-4b88-9565-96bf61bb3912. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.660587] env[62974]: DEBUG nova.network.neutron [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updating instance_info_cache with network_info: [{"id": "47b61932-1b0f-4b88-9565-96bf61bb3912", "address": "fa:16:3e:57:5f:fc", "network": {"id": "5ff66071-9852-4e55-abe0-836a3842a025", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2009514308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2533bd1aab82429f8e7f4eb68cbc94e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b61932-1b", "ovs_interfaceid": "47b61932-1b0f-4b88-9565-96bf61bb3912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.661900] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.678776] env[62974]: DEBUG nova.compute.provider_tree [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 596.684026] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653755, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.707370] env[62974]: DEBUG nova.network.neutron [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Successfully updated port: a9d97dbe-61b9-4710-a3f6-ef2caed51d6b {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.761676] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653756, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.764575] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Received event network-changed-c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 596.764761] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Refreshing instance network info cache due to event network-changed-c1dbf093-9abb-4c1d-a4bc-163058074d4f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 596.764967] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Acquiring lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.765132] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Acquired lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.765286] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Refreshing network info cache for port c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 596.768388] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.768664] env[62974]: DEBUG nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance network_info: |[{"id": "f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf", "address": "fa:16:3e:e4:2d:ad", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b6e9bd-89", "ovs_interfaceid": "f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 596.769297] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:2d:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.778157] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating folder: Project (fc2dc33e40e549d1a025e4b883c4dfb1). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.780219] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c72262d3-7c1b-41df-b07b-920a78ff9642 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.789272] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created folder: Project (fc2dc33e40e549d1a025e4b883c4dfb1) in parent group-v535199. [ 596.789578] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating folder: Instances. Parent ref: group-v535252. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.789878] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a848b717-6853-4582-80d0-a3655c98822f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.798144] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created folder: Instances in parent group-v535252. 
[ 596.798389] env[62974]: DEBUG oslo.service.loopingcall [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 596.798577] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 596.798836] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a84ec1af-e65d-4827-b4bb-0260f1e8a8ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.824132] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.824132] env[62974]: value = "task-2653759" [ 596.824132] env[62974]: _type = "Task" [ 596.824132] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.831745] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653759, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.968682] env[62974]: DEBUG nova.network.neutron [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Updated VIF entry in instance network info cache for port b07f0ace-3474-4ef6-81c7-2959c86f0791. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.970147] env[62974]: DEBUG nova.network.neutron [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Updating instance_info_cache with network_info: [{"id": "b07f0ace-3474-4ef6-81c7-2959c86f0791", "address": "fa:16:3e:73:f8:ad", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb07f0ace-34", "ovs_interfaceid": "b07f0ace-3474-4ef6-81c7-2959c86f0791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.045629] env[62974]: DEBUG oslo_vmware.api [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653754, 'name': RemoveSnapshot_Task, 'duration_secs': 
0.722929} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.045924] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 597.046175] env[62974]: INFO nova.compute.manager [None req-e5af3097-d3d8-4b23-95b0-d1b8a8e97438 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Took 14.07 seconds to snapshot the instance on the hypervisor. [ 597.176048] env[62974]: DEBUG oslo_concurrency.lockutils [req-b392f870-ed73-40a6-9ee0-8f8a6209d3b0 req-2af2cc02-c00c-4bcc-871b-5fb8a7b4bd68 service nova] Releasing lock "refresh_cache-1933bc47-1717-48c1-b4a2-492a17573de7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.176048] env[62974]: INFO nova.compute.manager [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Took 28.75 seconds to build instance. [ 597.182598] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565407} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.183127] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6dc914e9-bce5-4a19-a919-ae94981ea800/6dc914e9-bce5-4a19-a919-ae94981ea800.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.183315] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 597.183643] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-832997e9-c214-49da-9255-9390acdd9db2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.190660] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 597.190660] env[62974]: value = "task-2653760" [ 597.190660] env[62974]: _type = "Task" [ 597.190660] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.200885] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653760, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.210425] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-1873faa1-dec2-4d17-a71a-c53fea50c09b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.210575] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-1873faa1-dec2-4d17-a71a-c53fea50c09b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.210720] env[62974]: DEBUG nova.network.neutron [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.230081] env[62974]: DEBUG nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 39 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 597.230081] env[62974]: DEBUG nova.compute.provider_tree [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 39 to 40 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 597.230081] env[62974]: DEBUG nova.compute.provider_tree [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 597.266857] 
env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653756, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.339577] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653759, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.473508] env[62974]: DEBUG oslo_concurrency.lockutils [req-493e9b58-fd29-40f2-a10d-068ff8f8dfd2 req-8e1bab91-7d28-4f7a-88ce-400043765c2f service nova] Releasing lock "refresh_cache-6dc914e9-bce5-4a19-a919-ae94981ea800" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.504757] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updated VIF entry in instance network info cache for port c1dbf093-9abb-4c1d-a4bc-163058074d4f. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 597.505173] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updating instance_info_cache with network_info: [{"id": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "address": "fa:16:3e:be:01:c4", "network": {"id": "f0afd336-13eb-49da-8643-c6a4c51451d7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-622006440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adf9a2a44db94217bdd7652ef27b5737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1dbf093-9a", "ovs_interfaceid": "c1dbf093-9abb-4c1d-a4bc-163058074d4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.684337] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ac4ba8-9bfc-4594-9d10-a76432e1956b tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "586a3541-060f-4859-8507-17faa637b17e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.278s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.700727] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653760, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.072327} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.701047] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 597.704130] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4994b0a-f92c-48db-8b10-bbe4e5bf487d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.729644] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 6dc914e9-bce5-4a19-a919-ae94981ea800/6dc914e9-bce5-4a19-a919-ae94981ea800.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 597.730469] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69a0b1e2-a59c-4a31-977a-e6c34ee6a7c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.746614] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.808s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.753842] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.219s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.754081] env[62974]: DEBUG nova.objects.instance [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lazy-loading 'resources' on Instance uuid 8f4faa77-4f18-41da-b8d0-efba799d6ec6 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 597.756222] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "28c247f6-3179-425d-ae1c-615151b1e2ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.756497] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "28c247f6-3179-425d-ae1c-615151b1e2ff" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.762885] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 597.762885] env[62974]: value = "task-2653761" [ 597.762885] env[62974]: _type = "Task" [ 597.762885] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.396534] env[62974]: INFO nova.scheduler.client.report [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Deleted allocations for instance 2313468e-820f-4fff-bdeb-5d542c94584d [ 598.400937] env[62974]: DEBUG nova.network.neutron [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.404566] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Releasing lock "refresh_cache-001557f9-ea50-4e86-9eeb-dd4436791453" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.404806] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Received event network-changed-8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 598.404965] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Refreshing instance network info cache due to event network-changed-8e95b6b2-a646-4f70-9191-7305ffd14c84. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 598.405174] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Acquiring lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.405310] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Acquired lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.405465] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Refreshing network info cache for port 8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.406747] env[62974]: DEBUG nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 598.411638] env[62974]: DEBUG oslo_vmware.api [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653756, 'name': PowerOnVM_Task, 'duration_secs': 1.169145} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.416022] env[62974]: DEBUG nova.compute.manager [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Received event network-vif-plugged-a9d97dbe-61b9-4710-a3f6-ef2caed51d6b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 598.416022] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] Acquiring lock "1873faa1-dec2-4d17-a71a-c53fea50c09b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.416022] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.416022] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.416022] env[62974]: DEBUG nova.compute.manager [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc 
req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] No waiting events found dispatching network-vif-plugged-a9d97dbe-61b9-4710-a3f6-ef2caed51d6b {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 598.416364] env[62974]: WARNING nova.compute.manager [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Received unexpected event network-vif-plugged-a9d97dbe-61b9-4710-a3f6-ef2caed51d6b for instance with vm_state building and task_state spawning. [ 598.416364] env[62974]: DEBUG nova.compute.manager [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Received event network-changed-a9d97dbe-61b9-4710-a3f6-ef2caed51d6b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 598.416364] env[62974]: DEBUG nova.compute.manager [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Refreshing instance network info cache due to event network-changed-a9d97dbe-61b9-4710-a3f6-ef2caed51d6b. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 598.416364] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] Acquiring lock "refresh_cache-1873faa1-dec2-4d17-a71a-c53fea50c09b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.422637] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 598.422835] env[62974]: INFO nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Took 11.45 seconds to spawn the instance on the hypervisor. [ 598.423019] env[62974]: DEBUG nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 598.425082] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5872ae-029f-40ed-9c9d-de10945019cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.434538] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653761, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.434538] env[62974]: WARNING oslo_vmware.common.loopingcall [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] task run outlasted interval by 0.17008999999999996 sec [ 598.439800] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653759, 'name': CreateVM_Task, 'duration_secs': 0.635254} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.441550] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 598.445659] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.445816] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.446154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 598.450715] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2a8f1ca-a136-4bca-b437-855ad08032eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.452029] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.458185] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 598.458185] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1e703-1a06-38d7-dd5e-49e798f8c3df" [ 598.458185] env[62974]: _type = "Task" [ 598.458185] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.466658] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1e703-1a06-38d7-dd5e-49e798f8c3df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.658629] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.658858] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.750223] env[62974]: DEBUG nova.network.neutron [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Updating instance_info_cache with network_info: [{"id": "a9d97dbe-61b9-4710-a3f6-ef2caed51d6b", "address": "fa:16:3e:86:5a:1f", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9d97dbe-61", "ovs_interfaceid": "a9d97dbe-61b9-4710-a3f6-ef2caed51d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.931298] env[62974]: DEBUG oslo_concurrency.lockutils [None req-828a63dc-c7e7-42ef-a580-12aa4d2bf075 tempest-ServerDiagnosticsTest-979988022 tempest-ServerDiagnosticsTest-979988022-project-member] Lock "2313468e-820f-4fff-bdeb-5d542c94584d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.547s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.946764] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653761, 'name': ReconfigVM_Task, 'duration_secs': 0.760026} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.947899] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.951348] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 6dc914e9-bce5-4a19-a919-ae94981ea800/6dc914e9-bce5-4a19-a919-ae94981ea800.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 598.959226] env[62974]: INFO nova.compute.manager [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Took 30.12 seconds to build instance. [ 598.961528] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90eea90e-95df-4ebd-951d-bae831399c6a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.963349] env[62974]: INFO nova.compute.manager [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Rescuing [ 598.963635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.963814] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquired lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.964073] env[62974]: DEBUG nova.network.neutron [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 598.979744] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85bf8c6-1fd6-4857-9c3f-70a574114a79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.988609] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 598.988609] env[62974]: value = "task-2653762" [ 
598.988609] env[62974]: _type = "Task" [ 598.988609] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.989210] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1e703-1a06-38d7-dd5e-49e798f8c3df, 'name': SearchDatastore_Task, 'duration_secs': 0.011934} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.989771] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.990080] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 598.990379] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.990622] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.990888] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 598.997462] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81c243f3-57d7-469b-996f-d956c4fef0ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.000256] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c32ce72-9173-4012-b814-16331465c7ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.009135] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653762, 'name': Rename_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.037132] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 599.037132] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 599.038386] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538972bc-659b-4f25-85a6-e540213309a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.040874] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc1069e-7b92-4619-be8a-5ce7f8cca594 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.049303] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f892870c-b50f-4ee3-b11d-2f53596f3cdf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.053441] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 599.053441] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520d1678-ff13-45ad-e350-ea9419d148c7" [ 599.053441] env[62974]: _type = "Task" [ 599.053441] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.066251] env[62974]: DEBUG nova.compute.provider_tree [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.073797] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520d1678-ff13-45ad-e350-ea9419d148c7, 'name': SearchDatastore_Task, 'duration_secs': 0.00997} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.080024] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba1d0434-c25a-43c8-9605-9404c5c35f44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.084538] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 599.084538] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527de9bd-0586-0c6f-3a09-2ad4b3185433" [ 599.084538] env[62974]: _type = "Task" [ 599.084538] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.093284] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527de9bd-0586-0c6f-3a09-2ad4b3185433, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.255499] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-1873faa1-dec2-4d17-a71a-c53fea50c09b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.256168] env[62974]: DEBUG nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Instance network_info: |[{"id": "a9d97dbe-61b9-4710-a3f6-ef2caed51d6b", "address": "fa:16:3e:86:5a:1f", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9d97dbe-61", "ovs_interfaceid": "a9d97dbe-61b9-4710-a3f6-ef2caed51d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 599.256679] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] Acquired lock "refresh_cache-1873faa1-dec2-4d17-a71a-c53fea50c09b" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.256879] env[62974]: DEBUG nova.network.neutron [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Refreshing network info cache for port a9d97dbe-61b9-4710-a3f6-ef2caed51d6b {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 599.261029] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:5a:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9d97dbe-61b9-4710-a3f6-ef2caed51d6b', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.268575] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating folder: Project (39e59f58f7c24529bfce4bcc18cc7925). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 599.269894] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03784f5d-468f-462d-ae93-4075c67fa334 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.284629] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created folder: Project (39e59f58f7c24529bfce4bcc18cc7925) in parent group-v535199. [ 599.284867] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating folder: Instances. Parent ref: group-v535255. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 599.285182] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c7c39f3-3537-4b92-beb1-1a3950b88171 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.294995] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created folder: Instances in parent group-v535255. [ 599.295216] env[62974]: DEBUG oslo.service.loopingcall [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.295497] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 599.295694] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0b7b90c-5776-436f-9db8-9807bc7b3aa4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.311508] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Updated VIF entry in instance network info cache for port 8e95b6b2-a646-4f70-9191-7305ffd14c84. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 599.311896] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Updating instance_info_cache with network_info: [{"id": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "address": "fa:16:3e:cd:f7:77", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e95b6b2-a6", "ovs_interfaceid": "8e95b6b2-a646-4f70-9191-7305ffd14c84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.324661] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.324661] env[62974]: value = "task-2653765" [ 599.324661] env[62974]: _type = "Task" [ 599.324661] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.338459] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653765, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.468987] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11feaf7a-e1f2-421f-9c9b-d7e920dc9fb4 tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "ecde0e49-c344-4003-b858-8312c1ac344f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.640s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.500417] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653762, 'name': Rename_Task, 'duration_secs': 0.1809} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.502955] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 599.503238] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55b631ee-b447-4b6b-b3a1-4eb513908ade {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.509689] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 599.509689] env[62974]: value = "task-2653766" [ 599.509689] env[62974]: _type = "Task" [ 599.509689] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.517879] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653766, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.571222] env[62974]: DEBUG nova.scheduler.client.report [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 599.599916] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527de9bd-0586-0c6f-3a09-2ad4b3185433, 'name': SearchDatastore_Task, 'duration_secs': 0.014051} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.603286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.603992] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 599.604434] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d1754eb-f3aa-4231-b047-92c1c96c854c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.611901] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 599.611901] env[62974]: value = "task-2653767" [ 599.611901] env[62974]: _type = "Task" [ 599.611901] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.622341] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653767, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.756326] env[62974]: DEBUG nova.network.neutron [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.820135] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Releasing lock "refresh_cache-f9adcd7e-58a0-433c-8602-cca814b84aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.820135] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Received event network-vif-plugged-f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 599.820135] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Acquiring lock "b3827c67-9075-4a53-9f9e-8651e3f4b211-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.820135] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.820135] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.820915] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] No waiting events found dispatching network-vif-plugged-f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 599.820915] env[62974]: WARNING nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Received unexpected event network-vif-plugged-f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf for instance with vm_state building and task_state spawning. [ 599.820915] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Received event network-changed-f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 599.820915] env[62974]: DEBUG nova.compute.manager [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Refreshing instance network info cache due to event network-changed-f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 599.820915] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Acquiring lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.821179] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Acquired lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.821179] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Refreshing network info cache for port f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 599.836932] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653765, 'name': CreateVM_Task, 'duration_secs': 0.353517} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.838187] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 599.838902] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.839082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.839405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 599.839917] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-768c26dc-771d-4842-aff2-746460e695ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.846317] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 599.846317] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5235ef11-45fe-6dbe-67bb-0977c9a4436d" [ 599.846317] env[62974]: _type = "Task" [ 599.846317] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.856803] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5235ef11-45fe-6dbe-67bb-0977c9a4436d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.899660] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "001557f9-ea50-4e86-9eeb-dd4436791453" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.899885] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "001557f9-ea50-4e86-9eeb-dd4436791453" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.900093] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "001557f9-ea50-4e86-9eeb-dd4436791453-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.900339] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "001557f9-ea50-4e86-9eeb-dd4436791453-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.900561] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "001557f9-ea50-4e86-9eeb-dd4436791453-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.903525] env[62974]: INFO nova.compute.manager [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Terminating instance [ 599.972661] env[62974]: DEBUG nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 600.028161] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653766, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.077987] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.324s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.080964] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.504s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.084311] env[62974]: INFO nova.compute.claims [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.124055] env[62974]: INFO nova.scheduler.client.report [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Deleted allocations for instance 8f4faa77-4f18-41da-b8d0-efba799d6ec6 [ 600.137228] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653767, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.152273] env[62974]: DEBUG nova.network.neutron [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Updated VIF entry in instance network info cache for port a9d97dbe-61b9-4710-a3f6-ef2caed51d6b. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 600.154119] env[62974]: DEBUG nova.network.neutron [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Updating instance_info_cache with network_info: [{"id": "a9d97dbe-61b9-4710-a3f6-ef2caed51d6b", "address": "fa:16:3e:86:5a:1f", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9d97dbe-61", "ovs_interfaceid": "a9d97dbe-61b9-4710-a3f6-ef2caed51d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.261027] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Releasing lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.359507] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5235ef11-45fe-6dbe-67bb-0977c9a4436d, 'name': SearchDatastore_Task, 'duration_secs': 0.010105} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.360328] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.360328] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.360328] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.360534] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.360636] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.360922] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-149d97a8-246f-4c06-a765-e3fcda4cd598 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.372037] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.372037] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 600.376037] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30519387-838f-4423-a9aa-d88ad5b85030 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.384358] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 600.384358] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5245a059-7c53-213f-9e4a-8e24f0e82eec" [ 600.384358] env[62974]: _type = "Task" [ 600.384358] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.397156] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5245a059-7c53-213f-9e4a-8e24f0e82eec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.404132] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2f3ba2-48a2-475b-a945-681c10e49624 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.409246] env[62974]: DEBUG nova.compute.manager [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 600.409497] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.409758] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Suspending the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 600.415183] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a2935b-b5ef-4536-9495-2a2a41ecc546 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.417423] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-da71e828-02d6-473b-a847-5bf42bdff9c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.425069] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 600.426440] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e09ca3d-2fe1-41d4-94ad-1839ff8c24ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.428198] env[62974]: DEBUG oslo_vmware.api [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] Waiting for the task: (returnval){ [ 600.428198] env[62974]: value = "task-2653768" [ 600.428198] env[62974]: _type = "Task" [ 600.428198] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.437282] env[62974]: DEBUG oslo_vmware.api [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 600.437282] env[62974]: value = "task-2653769" [ 600.437282] env[62974]: _type = "Task" [ 600.437282] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.446352] env[62974]: DEBUG oslo_vmware.api [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] Task: {'id': task-2653768, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.449821] env[62974]: DEBUG oslo_vmware.api [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653769, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.521569] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.529036] env[62974]: DEBUG oslo_vmware.api [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653766, 'name': PowerOnVM_Task, 'duration_secs': 0.922332} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.532238] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 600.532466] env[62974]: INFO nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Took 10.91 seconds to spawn the instance on the hypervisor. [ 600.532624] env[62974]: DEBUG nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.533552] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8d54bc-4336-4a45-9350-539328494856 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.627419] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653767, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640045} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.631528] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 600.631770] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 600.637945] env[62974]: DEBUG oslo_concurrency.lockutils [None req-87c99c7e-5e77-47ff-8a05-b363a1db991c tempest-TenantUsagesTestJSON-964025540 tempest-TenantUsagesTestJSON-964025540-project-member] Lock "8f4faa77-4f18-41da-b8d0-efba799d6ec6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.639523] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bb9dd4f-5b4f-4ceb-8e88-0c3e29b6eb29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.644465] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "69597c3f-ccb2-474d-bb7c-629c5da0b456" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.644597] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "69597c3f-ccb2-474d-bb7c-629c5da0b456" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.650264] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 600.650264] env[62974]: value = "task-2653770" [ 600.650264] env[62974]: _type = "Task" [ 600.650264] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.664225] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c94cd81-4442-4c3f-ad75-6ba6734518fc req-e16c2c4a-2d87-41a1-90d1-36417ef39e8e service nova] Releasing lock "refresh_cache-1873faa1-dec2-4d17-a71a-c53fea50c09b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.664737] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653770, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.688096] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Updated VIF entry in instance network info cache for port f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 600.688870] env[62974]: DEBUG nova.network.neutron [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Updating instance_info_cache with network_info: [{"id": "f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf", "address": "fa:16:3e:e4:2d:ad", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b6e9bd-89", "ovs_interfaceid": "f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.894812] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5245a059-7c53-213f-9e4a-8e24f0e82eec, 'name': SearchDatastore_Task, 'duration_secs': 0.012482} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.895749] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24dfa3e5-f6e1-41df-a230-e93c3a108361 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.901258] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 600.901258] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529f6967-ff60-3c0c-3e71-c3fa607fb32e" [ 600.901258] env[62974]: _type = "Task" [ 600.901258] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.910575] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529f6967-ff60-3c0c-3e71-c3fa607fb32e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.938777] env[62974]: DEBUG oslo_vmware.api [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] Task: {'id': task-2653768, 'name': SuspendVM_Task} progress is 62%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.949742] env[62974]: DEBUG oslo_vmware.api [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653769, 'name': PowerOffVM_Task, 'duration_secs': 0.212474} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.949742] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 600.949742] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 600.949742] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a5299e4-a88c-4c36-810d-09bcf755fcc8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.013940] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 601.014661] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 601.014661] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Deleting the datastore file [datastore2] 001557f9-ea50-4e86-9eeb-dd4436791453 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 601.014858] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42f50323-a628-4927-b231-6a2ddd18db17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.023609] env[62974]: DEBUG oslo_vmware.api [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for the task: (returnval){ [ 601.023609] env[62974]: value = "task-2653772" [ 601.023609] env[62974]: _type = "Task" [ 601.023609] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.032560] env[62974]: DEBUG oslo_vmware.api [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653772, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.057226] env[62974]: INFO nova.compute.manager [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Took 31.58 seconds to build instance. [ 601.174459] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653770, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126596} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.174459] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 601.174459] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585219a4-de51-4215-a48e-670af8881f34 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.192543] env[62974]: DEBUG oslo_concurrency.lockutils [req-81b89c30-f4e0-4d88-8d81-c38c0343b65c req-6d6b4f7b-6dea-47aa-8f5f-112fd3c0deb9 service nova] Releasing lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.202508] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 601.206047] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-325615b0-7e2c-46bf-b987-2b5c17c3a385 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.226599] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 601.226599] env[62974]: value = "task-2653773" [ 601.226599] env[62974]: _type = "Task" [ 601.226599] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.240412] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653773, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.417149] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529f6967-ff60-3c0c-3e71-c3fa607fb32e, 'name': SearchDatastore_Task, 'duration_secs': 0.012942} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.417512] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.417833] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 601.418168] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dab2254c-365c-40f1-9daa-8542ed311e2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.428088] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 601.428088] env[62974]: value = "task-2653774" [ 601.428088] env[62974]: _type = "Task" [ 601.428088] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.449087] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.449087] env[62974]: DEBUG oslo_vmware.api [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] Task: {'id': task-2653768, 'name': SuspendVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.532673] env[62974]: DEBUG oslo_vmware.api [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Task: {'id': task-2653772, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435161} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.535347] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 601.535639] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 601.535743] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 601.535951] env[62974]: INFO nova.compute.manager [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Took 1.13 seconds to destroy the instance on the hypervisor. [ 601.536204] env[62974]: DEBUG oslo.service.loopingcall [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.536564] env[62974]: DEBUG nova.compute.manager [-] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 601.536665] env[62974]: DEBUG nova.network.neutron [-] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.558216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d2705c4-cbe6-4c6f-ae05-4a5fd00a65f7 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.092s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.684758] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f5ed98-f25c-44d1-8679-3994f6e86056 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.694856] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c73bcce-f901-4714-b928-6ae42887ec31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.734440] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0915a2db-c8e8-44e1-a4fd-aae13c63d240 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.742535] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653773, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.746077] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd48a31-86ce-46ff-9f7d-397bf11a8345 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.762569] env[62974]: DEBUG nova.compute.provider_tree [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.809404] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 601.809404] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2966cd1-eae3-4826-bf75-e519bb38a25d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.826877] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 601.826877] env[62974]: value = "task-2653775" [ 601.826877] env[62974]: _type = "Task" [ 601.826877] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.840090] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653775, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.950828] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653774, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.957619] env[62974]: DEBUG oslo_vmware.api [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] Task: {'id': task-2653768, 'name': SuspendVM_Task, 'duration_secs': 1.036492} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.958524] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Suspended the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 601.959672] env[62974]: DEBUG nova.compute.manager [None req-2aa70fab-2710-479c-9949-5b54d456bdce tempest-ServersAdminNegativeTestJSON-1825612179 tempest-ServersAdminNegativeTestJSON-1825612179-project-admin] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 601.960647] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27925acf-2cb3-4a34-9799-8961bfb87aa1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.062051] env[62974]: DEBUG nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.244559] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653773, 'name': ReconfigVM_Task, 'duration_secs': 0.624589} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.244861] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Reconfigured VM instance instance-00000013 to attach disk [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 602.245503] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-037fc56e-0fe7-4ee0-a3bb-99b34119f6b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.251930] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 602.251930] env[62974]: value = "task-2653776" [ 602.251930] env[62974]: _type = "Task" [ 602.251930] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.264440] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653776, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.266643] env[62974]: DEBUG nova.scheduler.client.report [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 602.339021] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653775, 'name': PowerOffVM_Task, 'duration_secs': 0.478641} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.339021] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 602.339021] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41886a2-4108-4663-b4af-bd4c001e5469 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.361372] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede52c63-1f43-41db-8517-e0b9dbb19001 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.405310] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 602.405643] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5247f90d-6fdb-4b9a-b077-4dcc1f13ffa0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.412195] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 602.412195] env[62974]: value = "task-2653777" [ 602.412195] env[62974]: _type = "Task" [ 602.412195] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.426889] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 602.427146] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.427424] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.428296] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.428296] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.428296] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-866a64e6-4450-48c8-a9f8-0868c9df277e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.441183] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718397} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.443433] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 602.443433] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 602.443433] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.443433] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.443691] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a189e515-a611-4f53-9f45-3390be18e042 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.445759] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f1e8477-2793-4586-85e9-5edfa11a2b8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.451592] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 602.451592] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c8b7a9-389e-7910-3c05-9061f9c7fc66" [ 602.451592] env[62974]: _type = "Task" [ 602.451592] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.456089] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 602.456089] env[62974]: value = "task-2653778" [ 602.456089] env[62974]: _type = "Task" [ 602.456089] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.464988] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c8b7a9-389e-7910-3c05-9061f9c7fc66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.468369] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653778, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.596126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.662740] env[62974]: DEBUG oslo_concurrency.lockutils [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "6dc914e9-bce5-4a19-a919-ae94981ea800" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.662989] env[62974]: DEBUG oslo_concurrency.lockutils [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.663178] env[62974]: DEBUG nova.compute.manager [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 602.664081] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce07395-c888-47ac-b6dd-fafb4d14f37b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.672459] env[62974]: DEBUG nova.compute.manager [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 602.673023] env[62974]: DEBUG nova.objects.instance [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lazy-loading 'flavor' on Instance uuid 6dc914e9-bce5-4a19-a919-ae94981ea800 {{(pid=62974) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 602.761476] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653776, 'name': Rename_Task, 'duration_secs': 0.293145} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.761816] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 602.762167] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0e7aa4d-683f-4684-b0ee-b2389431217a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.770238] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 602.770238] env[62974]: value = "task-2653779" [ 602.770238] env[62974]: _type = "Task" [ 602.770238] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.774375] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.775447] env[62974]: DEBUG nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 602.778808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.963s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.778808] env[62974]: DEBUG nova.objects.instance [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lazy-loading 'resources' on Instance uuid a8446718-f2df-4bad-b5e3-537f19daa823 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 602.784741] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653779, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.875625] env[62974]: DEBUG nova.network.neutron [-] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.965475] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c8b7a9-389e-7910-3c05-9061f9c7fc66, 'name': SearchDatastore_Task, 'duration_secs': 0.026932} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.966568] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8c76959-94e1-493d-b967-41a877a5c2c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.972285] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0752} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.972987] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.973836] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec0327c-8122-4955-b57f-e3813872542e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.977591] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 602.977591] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a7ae83-03fb-1b79-a160-b7550c00e302" [ 602.977591] env[62974]: _type = "Task" [ 602.977591] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.002715] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.003575] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7625524-617b-4656-8e22-a7d3b12ced28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.023011] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a7ae83-03fb-1b79-a160-b7550c00e302, 'name': SearchDatastore_Task, 'duration_secs': 0.012469} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.023990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.024160] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 586a3541-060f-4859-8507-17faa637b17e/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. {{(pid=62974) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 603.024573] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb32a454-ecb9-44a7-a1ad-d0c62c85fd11 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.031562] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 603.031562] env[62974]: value = "task-2653780" [ 603.031562] env[62974]: _type = "Task" [ 603.031562] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.031900] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 603.031900] env[62974]: value = "task-2653781" [ 603.031900] env[62974]: _type = "Task" [ 603.031900] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.046849] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.046849] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653780, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.247760] env[62974]: DEBUG nova.compute.manager [req-fbf50498-dcf7-4a20-abe5-fb5d0a8d2d48 req-7b202bbf-ecb2-4a43-a0bf-25bb4f948be7 service nova] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Received event network-vif-deleted-c1dbf093-9abb-4c1d-a4bc-163058074d4f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 603.284785] env[62974]: DEBUG nova.compute.utils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.291356] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653779, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.292416] env[62974]: DEBUG nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 603.292671] env[62974]: DEBUG nova.network.neutron [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.364744] env[62974]: DEBUG nova.policy [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f91e6f005d44cb480912c2347243941', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df4c0c02aa7649ac8ded32754fa9613f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 603.381585] env[62974]: INFO nova.compute.manager [-] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Took 1.84 seconds to deallocate network for instance. 
[ 603.548096] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653781, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.548213] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653780, 'name': ReconfigVM_Task, 'duration_secs': 0.320155} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.548431] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 603.549099] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd7ad2c1-f99c-46c5-926a-a7172eff26d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.555098] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 603.555098] env[62974]: value = "task-2653782" [ 603.555098] env[62974]: _type = "Task" [ 603.555098] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.564905] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653782, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.619777] env[62974]: DEBUG nova.compute.manager [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.623456] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deaf1ed7-0c50-4ab4-a22f-aead2bd3c252 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.685736] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 603.685736] env[62974]: DEBUG nova.network.neutron [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Successfully created port: 39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.686336] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf45783d-51d1-462f-93b2-cb65b81d2a93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.695028] env[62974]: DEBUG oslo_vmware.api [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 603.695028] env[62974]: value = "task-2653783" [ 603.695028] env[62974]: _type = "Task" [ 603.695028] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.707300] env[62974]: DEBUG oslo_vmware.api [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.761037] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e54aafd-5774-463d-9d80-481789ddc94a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.767586] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8701913e-540f-4b9b-90f7-03b09f09c42b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.780707] env[62974]: DEBUG oslo_vmware.api [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653779, 'name': PowerOnVM_Task, 'duration_secs': 0.575834} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.812588] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.812909] env[62974]: INFO nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Took 11.48 seconds to spawn the instance on the hypervisor. [ 603.813397] env[62974]: DEBUG nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.814064] env[62974]: DEBUG nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 603.821035] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35d3d42-9782-48c9-bc0d-eef77eb3b5c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.822800] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6e151d-db6b-4300-863a-31bb041930ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.833034] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98aff1a-c22b-4ef6-a90c-44d35794ec59 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.851823] env[62974]: DEBUG nova.compute.provider_tree [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.891484] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.051525] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565944} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.052103] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 586a3541-060f-4859-8507-17faa637b17e/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. [ 604.053895] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fb042b-6130-45ff-8483-0ac5dd87b123 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.081009] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 586a3541-060f-4859-8507-17faa637b17e/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 604.084294] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3de15cb1-0b8b-4032-8cef-e4f17c1d2068 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.096841] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653782, 'name': Rename_Task, 'duration_secs': 0.153898} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.097116] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 604.097712] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27be9a55-fb78-4e19-96f1-fcaebdf22d68 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.102052] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 604.102052] env[62974]: value = "task-2653784" [ 604.102052] env[62974]: _type = "Task" [ 604.102052] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.106016] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 604.106016] env[62974]: value = "task-2653785" [ 604.106016] env[62974]: _type = "Task" [ 604.106016] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.111742] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653784, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.116085] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653785, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.140481] env[62974]: INFO nova.compute.manager [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] instance snapshotting [ 604.143844] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eed3f74-5ef0-499c-b0bd-9f2d46aef025 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.163942] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f608ab8f-306d-4b51-a148-7ac2bf178322 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.206438] env[62974]: DEBUG oslo_vmware.api [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653783, 'name': PowerOffVM_Task, 'duration_secs': 0.338458} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.206765] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 604.206992] env[62974]: DEBUG nova.compute.manager [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 604.207902] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f774057c-db64-44c1-ac0f-bfecfed6f2ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.349960] env[62974]: INFO nova.compute.manager [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Took 33.88 seconds to build instance. 
[ 604.355895] env[62974]: DEBUG nova.scheduler.client.report [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 604.623165] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653784, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.626964] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653785, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.674818] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 604.675381] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f35cd82c-d559-4dcb-9118-60a741febdf2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.683262] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 604.683262] env[62974]: value = "task-2653786" [ 604.683262] env[62974]: _type = "Task" [ 604.683262] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.696962] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653786, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.724419] env[62974]: DEBUG oslo_concurrency.lockutils [None req-647f9b73-3608-4d11-8ec2-048580d913e8 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.061s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.829399] env[62974]: DEBUG nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 604.854776] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d97faa53-d8bd-4607-bf82-45debcab5f0d tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.396s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.861124] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.083s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.870041] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.166s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.870041] env[62974]: DEBUG nova.objects.instance [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lazy-loading 'resources' on Instance uuid 2a498460-fced-410b-8b33-3595a2ac6753 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 604.871977] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 604.876142] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.876329] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 604.877786] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.877786] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 604.877786] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 604.877786] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 604.877786] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 604.878117] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 604.878117] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 604.878117] env[62974]: DEBUG nova.virt.hardware [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 604.878756] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7369d0-22ba-4b25-81df-98d09e6ba3ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.888709] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f389b8f-e4df-489d-a407-9bf3316df5ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.932686] env[62974]: INFO nova.scheduler.client.report [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted allocations for instance a8446718-f2df-4bad-b5e3-537f19daa823 [ 605.118206] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653784, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.122730] env[62974]: DEBUG oslo_vmware.api [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653785, 'name': PowerOnVM_Task, 'duration_secs': 0.993152} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.122730] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 605.122951] env[62974]: INFO nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Took 10.15 seconds to spawn the instance on the hypervisor. 
[ 605.123499] env[62974]: DEBUG nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 605.124910] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc48a095-8ae8-4a25-b741-9fcb6e6c101d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.195680] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653786, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.358905] env[62974]: DEBUG nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 605.449757] env[62974]: DEBUG oslo_concurrency.lockutils [None req-85e51ec0-e112-4d71-8c3c-b80bfa78f4f5 tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "a8446718-f2df-4bad-b5e3-537f19daa823" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.235s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.614864] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653784, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.646679] env[62974]: INFO nova.compute.manager [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Took 33.78 seconds to build instance. [ 605.704160] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653786, 'name': CreateSnapshot_Task, 'duration_secs': 0.729577} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.706850] env[62974]: DEBUG nova.network.neutron [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Successfully updated port: 39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.708056] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 605.709237] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844134df-cb51-448c-b6e6-2c83ba29f86e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.889756] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.945150] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f73a850-9586-4ff7-858a-c6daba1397f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.954495] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d475fb-258f-43c9-8839-7c38e177f5c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.999796] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75f7388-5f5b-4c41-94f9-3ee7c9a66776 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.007159] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92146d9-6e91-4630-af41-cda46b7110a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.027030] env[62974]: DEBUG nova.compute.provider_tree [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.116133] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653784, 'name': ReconfigVM_Task, 'duration_secs': 1.81287} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.116623] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 586a3541-060f-4859-8507-17faa637b17e/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 606.118030] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57d00f6-22b9-46f4-b76d-408fc4ac4898 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.148995] env[62974]: DEBUG oslo_concurrency.lockutils [None req-62ab863d-49b3-4654-a955-63fa1db38d7e tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.593s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.149475] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97ca62bc-0e52-4e61-bae3-5e7c90c6466f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.165846] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 606.165846] env[62974]: value = "task-2653787" [ 606.165846] env[62974]: _type = "Task" [ 606.165846] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.175370] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653787, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.210849] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.210849] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquired lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.210849] env[62974]: DEBUG nova.network.neutron [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.232812] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 606.234063] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a2bf0dd6-57b5-4c72-a64b-1add7bd0f140 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.243765] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 606.243765] env[62974]: value = "task-2653788" [ 606.243765] env[62974]: _type = "Task" [ 606.243765] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.254153] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653788, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.490256] env[62974]: DEBUG nova.compute.manager [req-ff755391-c008-402f-a45e-5e4f5f9315b9 req-269ad434-cb77-47cb-8f9b-8f8faf1613e8 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Received event network-vif-plugged-39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 606.490256] env[62974]: DEBUG oslo_concurrency.lockutils [req-ff755391-c008-402f-a45e-5e4f5f9315b9 req-269ad434-cb77-47cb-8f9b-8f8faf1613e8 service nova] Acquiring lock "05742180-08db-45db-9ee0-e359aa8af2f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.490256] env[62974]: DEBUG oslo_concurrency.lockutils [req-ff755391-c008-402f-a45e-5e4f5f9315b9 req-269ad434-cb77-47cb-8f9b-8f8faf1613e8 service nova] Lock "05742180-08db-45db-9ee0-e359aa8af2f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.490403] env[62974]: DEBUG oslo_concurrency.lockutils [req-ff755391-c008-402f-a45e-5e4f5f9315b9 req-269ad434-cb77-47cb-8f9b-8f8faf1613e8 service nova] Lock "05742180-08db-45db-9ee0-e359aa8af2f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.490637] env[62974]: DEBUG nova.compute.manager [req-ff755391-c008-402f-a45e-5e4f5f9315b9 req-269ad434-cb77-47cb-8f9b-8f8faf1613e8 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] No waiting events found dispatching network-vif-plugged-39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 606.490744] env[62974]: WARNING nova.compute.manager [req-ff755391-c008-402f-a45e-5e4f5f9315b9 req-269ad434-cb77-47cb-8f9b-8f8faf1613e8 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Received unexpected event network-vif-plugged-39515e98-a8f4-4af9-9948-b0a5d05d3188 for instance with vm_state building and task_state spawning. 
[ 606.500792] env[62974]: DEBUG nova.compute.manager [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 606.502532] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aeb5395-d3c9-4a95-b352-f2dec476dd6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.529179] env[62974]: DEBUG nova.scheduler.client.report [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 606.662888] env[62974]: DEBUG nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 606.676050] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653787, 'name': ReconfigVM_Task, 'duration_secs': 0.300394} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.676327] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 606.676596] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4547d2a4-bb50-44a3-9b22-f1f8accf6bb5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.684388] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 606.684388] env[62974]: value = "task-2653789" [ 606.684388] env[62974]: _type = "Task" [ 606.684388] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.693508] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653789, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.756740] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653788, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.767327] env[62974]: DEBUG nova.network.neutron [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.003298] env[62974]: DEBUG nova.network.neutron [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Updating instance_info_cache with network_info: [{"id": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "address": "fa:16:3e:0f:52:51", "network": {"id": "a7b97036-f96d-45ad-817d-464bdde49ab0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1367360885-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df4c0c02aa7649ac8ded32754fa9613f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39515e98-a8", "ovs_interfaceid": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.015941] env[62974]: INFO nova.compute.manager [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] instance snapshotting [ 607.016469] env[62974]: WARNING nova.compute.manager [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 607.022825] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1370cdd7-5e47-4940-8bbe-2a588a61d869 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.046791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.181s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.050593] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.845s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.052402] env[62974]: INFO nova.compute.claims [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.059023] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52978ab9-9532-4efa-b030-5639ab0f7be8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.093733] env[62974]: INFO nova.scheduler.client.report [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted allocations for instance 2a498460-fced-410b-8b33-3595a2ac6753 [ 607.191523] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.198030] env[62974]: DEBUG oslo_vmware.api [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653789, 'name': PowerOnVM_Task, 'duration_secs': 0.470274} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.198030] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 607.203124] env[62974]: DEBUG nova.compute.manager [None req-58c179a8-1788-4777-a557-2613169a0bff tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 607.204898] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c06c95-b1af-4cc0-b219-f4443849705e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.258686] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653788, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.506747] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Releasing lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.507130] env[62974]: DEBUG nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Instance network_info: |[{"id": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "address": "fa:16:3e:0f:52:51", "network": {"id": "a7b97036-f96d-45ad-817d-464bdde49ab0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1367360885-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df4c0c02aa7649ac8ded32754fa9613f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39515e98-a8", "ovs_interfaceid": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 607.507505] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:52:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd098b1c-636f-492d-b5ae-037cb0cae454', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39515e98-a8f4-4af9-9948-b0a5d05d3188', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.515380] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Creating folder: Project (df4c0c02aa7649ac8ded32754fa9613f). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.516285] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8bf82ef-df24-45d0-87c0-ecc4719a9024 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.527428] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Created folder: Project (df4c0c02aa7649ac8ded32754fa9613f) in parent group-v535199. [ 607.527862] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Creating folder: Instances. Parent ref: group-v535260. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.527862] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7db2c42-b614-43dd-bd10-21d32b161a86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.538943] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Created folder: Instances in parent group-v535260. [ 607.539216] env[62974]: DEBUG oslo.service.loopingcall [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.539405] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 607.539609] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49bc8d52-c202-4f16-93ac-79dcbf91b324 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.564585] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.564585] env[62974]: value = "task-2653792" [ 607.564585] env[62974]: _type = "Task" [ 607.564585] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.570841] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 607.575834] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0bcfa13b-a3f1-492b-8901-ef8bda48a9a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.579243] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653792, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.584276] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 607.584276] env[62974]: value = "task-2653793" [ 607.584276] env[62974]: _type = "Task" [ 607.584276] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.601014] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653793, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.607803] env[62974]: DEBUG oslo_concurrency.lockutils [None req-595685d3-81ac-4609-98b8-b2e54ef34eae tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2a498460-fced-410b-8b33-3595a2ac6753" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.802s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.765801] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653788, 'name': CloneVM_Task} progress is 95%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.006582] env[62974]: INFO nova.compute.manager [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Rebuilding instance [ 608.079008] env[62974]: DEBUG nova.compute.manager [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 608.079969] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7587057a-22c8-43e2-a2c8-031075b77e8b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.087410] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653792, 'name': CreateVM_Task, 'duration_secs': 0.452545} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.091394] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 608.094337] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.094337] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.094337] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 608.098122] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5e26f54-414b-4b21-aa22-9f317f051f8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.104611] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653793, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.109470] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 608.109470] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e43601-3a62-5e4f-7d66-adf690092a40" [ 608.109470] env[62974]: _type = "Task" [ 608.109470] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.120391] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e43601-3a62-5e4f-7d66-adf690092a40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.264690] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653788, 'name': CloneVM_Task, 'duration_secs': 1.808969} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.265854] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Created linked-clone VM from snapshot [ 608.266450] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99701e1-bcc5-4217-9b49-3c0cc0dcf0e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.284801] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Uploading image 36a29e07-a677-4f06-ba0b-14b8acfbed22 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 608.317701] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 608.317701] env[62974]: value = "vm-535259" [ 608.317701] env[62974]: _type = "VirtualMachine" [ 608.317701] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 608.317701] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-87bd97c7-6992-4d87-900e-139454c73b4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.323312] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lease: (returnval){ [ 608.323312] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52185a8c-a96a-9f67-4b7e-a3efe7f0938f" [ 608.323312] env[62974]: _type = "HttpNfcLease" [ 608.323312] env[62974]: } obtained for exporting VM: (result){ [ 608.323312] env[62974]: value = "vm-535259" [ 608.323312] env[62974]: _type = "VirtualMachine" [ 608.323312] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 608.323625] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the lease: (returnval){ [ 608.323625] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52185a8c-a96a-9f67-4b7e-a3efe7f0938f" [ 608.323625] env[62974]: _type = "HttpNfcLease" [ 608.323625] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 608.333566] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 608.333566] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52185a8c-a96a-9f67-4b7e-a3efe7f0938f" [ 608.333566] env[62974]: _type = "HttpNfcLease" [ 608.333566] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 608.599529] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653793, 'name': CreateSnapshot_Task, 'duration_secs': 0.73243} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.599817] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 608.600592] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81799d6f-189a-4570-a133-cf1adf6d759c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.628978] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e43601-3a62-5e4f-7d66-adf690092a40, 'name': SearchDatastore_Task, 'duration_secs': 0.012635} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.630454] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.630682] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.630918] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.632698] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.632915] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.633958] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b95f819-9604-4d54-a44f-e5d9e73e721b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.637595] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47ae9bd1-f0ff-4260-88a9-c3fd894d92fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.645207] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b7b243-9962-4e42-ba2f-8ab23197552f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.652054] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.652374] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.653513] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d89ccd-0fcf-411c-9eee-3e366c9de891 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.685636] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43e8df8-bc01-4be0-aa94-6cd2353bbdaa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.690166] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 608.690166] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f47008-7a72-c74a-d0fd-485b3f05ed25" [ 608.690166] env[62974]: _type = "Task" [ 608.690166] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.697955] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cb81c3-2fce-477e-9c7d-608e1efba8e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.707937] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f47008-7a72-c74a-d0fd-485b3f05ed25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.719938] env[62974]: DEBUG nova.compute.provider_tree [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 608.832117] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 608.832117] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52185a8c-a96a-9f67-4b7e-a3efe7f0938f" [ 608.832117] env[62974]: _type = "HttpNfcLease" [ 608.832117] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 608.832517] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 608.832517] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52185a8c-a96a-9f67-4b7e-a3efe7f0938f" [ 608.832517] env[62974]: _type = "HttpNfcLease" [ 608.832517] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 608.833769] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733769ce-fcef-4936-9e77-ff296bec14dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.841170] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52490b64-d846-3875-26d1-48f775488b5f/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 608.841298] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52490b64-d846-3875-26d1-48f775488b5f/disk-0.vmdk for reading. 
{{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 608.964916] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ebf3d9e8-3f54-47ea-91bd-38ac45d8fcc9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.110943] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 609.110943] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39f3bba9-bd5d-47fa-865f-42442ee41479 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.117870] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 609.117870] env[62974]: value = "task-2653795" [ 609.117870] env[62974]: _type = "Task" [ 609.117870] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.132309] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 609.132309] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e3e73df0-e433-45e5-bdc6-6027fd2648d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.140178] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.145285] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 609.145285] env[62974]: value = "task-2653796" [ 609.145285] env[62974]: _type = "Task" [ 609.145285] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.153711] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653796, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.205072] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f47008-7a72-c74a-d0fd-485b3f05ed25, 'name': SearchDatastore_Task, 'duration_secs': 0.02242} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.212587] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-981a52bc-7ec8-4146-b1fc-e54a567e3945 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.219917] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 609.219917] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528989a4-5f43-cedd-0715-4caf645eeda4" [ 609.219917] env[62974]: _type = "Task" [ 609.219917] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.234765] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528989a4-5f43-cedd-0715-4caf645eeda4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.253272] env[62974]: ERROR nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [req-01f4cb1d-a41a-45ac-a4ed-c33246bfcbc6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-01f4cb1d-a41a-45ac-a4ed-c33246bfcbc6"}]} [ 609.279980] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 609.301367] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 609.301788] env[62974]: DEBUG nova.compute.provider_tree [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.322725] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 609.354207] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 609.511900] env[62974]: DEBUG nova.compute.manager [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Received event network-changed-39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 609.511900] env[62974]: DEBUG nova.compute.manager [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b 
req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Refreshing instance network info cache due to event network-changed-39515e98-a8f4-4af9-9948-b0a5d05d3188. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 609.511900] env[62974]: DEBUG oslo_concurrency.lockutils [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] Acquiring lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.511900] env[62974]: DEBUG oslo_concurrency.lockutils [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] Acquired lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.511900] env[62974]: DEBUG nova.network.neutron [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Refreshing network info cache for port 39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.636383] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653795, 'name': PowerOffVM_Task, 'duration_secs': 0.19665} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.636696] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 609.636920] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.637937] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284e4179-0561-43b2-b2dd-b02a3e2ffff5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.667357] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 609.668478] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a56b09e6-ec3e-4907-ac23-45b9f61279b6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.674921] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653796, 'name': CloneVM_Task} progress 
is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.736659] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528989a4-5f43-cedd-0715-4caf645eeda4, 'name': SearchDatastore_Task, 'duration_secs': 0.013866} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.737140] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.737862] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 05742180-08db-45db-9ee0-e359aa8af2f0/05742180-08db-45db-9ee0-e359aa8af2f0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.737862] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fc1fcd6-7e61-4b4e-866a-f033c82b7752 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.742312] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 609.745019] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 609.745019] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleting the datastore file [datastore1] 1873faa1-dec2-4d17-a71a-c53fea50c09b {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 609.745019] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcdf49ab-678f-4f83-9208-4a290dad6ceb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.750027] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 609.750027] env[62974]: value = "task-2653798" [ 609.750027] 
env[62974]: _type = "Task" [ 609.750027] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.761279] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 609.761279] env[62974]: value = "task-2653799" [ 609.761279] env[62974]: _type = "Task" [ 609.761279] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.769506] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.777260] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.100020] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39731a00-b88c-449e-987f-76e5ee98b25a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.118651] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca445106-0567-4f16-9bae-a9c75623a63e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.166321] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a31a3e-9d0b-4f8c-8ff9-323ce3c610fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.180285] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653796, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.185982] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6522490c-1aea-4bcb-9f56-abaec4599d35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.206187] env[62974]: DEBUG nova.compute.provider_tree [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 610.266496] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "2ebb3385-4177-4506-a4b0-52b53405cf49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.266758] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.267252] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653798, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.285450] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165831} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.285626] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 610.285822] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 610.285995] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 610.295244] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "ea2227ff-f694-4baa-af17-dc50338d8fa6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.295998] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.672852] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653796, 'name': CloneVM_Task, 'duration_secs': 1.447122} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.672959] env[62974]: DEBUG nova.network.neutron [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Updated VIF entry in instance network info cache for port 39515e98-a8f4-4af9-9948-b0a5d05d3188. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.673354] env[62974]: DEBUG nova.network.neutron [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Updating instance_info_cache with network_info: [{"id": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "address": "fa:16:3e:0f:52:51", "network": {"id": "a7b97036-f96d-45ad-817d-464bdde49ab0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1367360885-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df4c0c02aa7649ac8ded32754fa9613f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39515e98-a8", "ovs_interfaceid": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.675262] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Created linked-clone VM from snapshot [ 610.676635] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df0c879-02f1-49b5-a22b-080ecf257fdc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.685453] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Uploading image 85f7c315-f08a-46b5-8e04-9a7ff332ae5b {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 610.708356] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 610.708356] env[62974]: value = "vm-535264" [ 610.708356] env[62974]: _type = "VirtualMachine" [ 610.708356] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 610.708911] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-569df194-0329-49fa-b710-d04d76486975 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.720087] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease: (returnval){ [ 610.720087] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52df472b-c627-f489-f4a0-a1f5f0b6ee74" [ 610.720087] env[62974]: _type = "HttpNfcLease" [ 610.720087] env[62974]: } obtained for exporting VM: (result){ [ 610.720087] env[62974]: value = "vm-535264" [ 610.720087] env[62974]: _type = "VirtualMachine" [ 610.720087] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 610.720704] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the lease: (returnval){ [ 610.720704] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52df472b-c627-f489-f4a0-a1f5f0b6ee74" [ 610.720704] env[62974]: _type = "HttpNfcLease" [ 610.720704] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 610.728459] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 610.728459] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52df472b-c627-f489-f4a0-a1f5f0b6ee74" [ 610.728459] env[62974]: _type = "HttpNfcLease" [ 610.728459] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 610.734981] env[62974]: ERROR nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [req-093f30ac-3658-4e24-8bfc-c5bda5df9cbc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-093f30ac-3658-4e24-8bfc-c5bda5df9cbc"}]} [ 610.762258] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592195} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.764377] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 610.765932] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 05742180-08db-45db-9ee0-e359aa8af2f0/05742180-08db-45db-9ee0-e359aa8af2f0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.766281] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.766745] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0702da05-be24-4b06-98d9-4266774adb37 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.773018] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 610.773018] env[62974]: value = "task-2653805" [ 610.773018] env[62974]: _type = "Task" [ 610.773018] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.781264] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653805, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.790520] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 610.790646] env[62974]: DEBUG nova.compute.provider_tree [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 610.825316] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 610.870914] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 611.113403] env[62974]: DEBUG nova.compute.manager [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 611.113403] env[62974]: DEBUG nova.compute.manager [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing instance network info cache due to event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 611.113403] env[62974]: DEBUG oslo_concurrency.lockutils [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] Acquiring lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.113403] env[62974]: DEBUG oslo_concurrency.lockutils [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] Acquired lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.113403] env[62974]: DEBUG nova.network.neutron [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.176689] env[62974]: DEBUG oslo_concurrency.lockutils [req-25fb3bbe-8769-43d3-bd36-1442790ceb6b req-56029cf6-1107-4bd7-901d-f02e889998b4 service nova] Releasing lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.232516] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 611.232516] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52df472b-c627-f489-f4a0-a1f5f0b6ee74" [ 611.232516] env[62974]: _type = "HttpNfcLease" [ 611.232516] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 611.235317] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 611.235317] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52df472b-c627-f489-f4a0-a1f5f0b6ee74" [ 611.235317] env[62974]: _type = "HttpNfcLease" [ 611.235317] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 611.236349] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c9cdce-7aed-43fd-9c5e-b2f43ea5bb73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.244201] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5207ab86-5a1b-882e-b826-c71be030e40b/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 611.244387] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5207ab86-5a1b-882e-b826-c71be030e40b/disk-0.vmdk for reading. 
{{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 611.320306] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102495} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.320306] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 611.321017] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd9ef25-b79c-424c-afb6-e2a775068234 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.344052] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 05742180-08db-45db-9ee0-e359aa8af2f0/05742180-08db-45db-9ee0-e359aa8af2f0.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.348971] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07416d88-4383-4c78-a1ef-71d43466cd31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.371497] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 611.372731] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 611.372731] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 
tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 611.372731] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 611.372731] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 611.372731] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 611.372934] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 611.372934] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 611.372934] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 611.373151] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 611.373412] env[62974]: DEBUG nova.virt.hardware [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 611.374305] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7178a1-91eb-431c-9080-ad0533f43073 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.378748] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 611.378748] env[62974]: value = "task-2653806" [ 611.378748] env[62974]: _type = "Task" [ 611.378748] env[62974]: } 
to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.389120] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9306b6-06f3-489d-ac9c-7e0d07ff51df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.398900] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653806, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.402756] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-38cd9573-fae7-4eba-853d-a74397ee7671 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.417380] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:5a:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9d97dbe-61b9-4710-a3f6-ef2caed51d6b', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.422680] env[62974]: DEBUG oslo.service.loopingcall [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.426353] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 611.426353] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1e4cba6-07f2-4b59-b2e7-13c2abe53ac6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.458143] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.458143] env[62974]: value = "task-2653807" [ 611.458143] env[62974]: _type = "Task" [ 611.458143] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.474592] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653807, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.545827] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c516ed-2859-4cf0-b127-20a8204f65fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.553963] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756ecd57-0268-4fac-afa4-427dbfa08739 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.585372] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1ff116-1a6f-4511-a098-46be6d9d7b68 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.593396] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f306603e-f68b-4b0f-843e-8c9b281e552a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.607237] env[62974]: DEBUG nova.compute.provider_tree [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.838172] env[62974]: DEBUG nova.network.neutron [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updated VIF entry in instance network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 611.838531] env[62974]: DEBUG nova.network.neutron [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.890256] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653806, 'name': ReconfigVM_Task, 'duration_secs': 0.3526} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.890677] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 05742180-08db-45db-9ee0-e359aa8af2f0/05742180-08db-45db-9ee0-e359aa8af2f0.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.892068] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8438b09-2ce3-4d2b-8fc5-9e5db06f172b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.900574] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 611.900574] env[62974]: value = "task-2653808" [ 611.900574] env[62974]: _type = "Task" [ 611.900574] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.912989] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653808, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.970925] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653807, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.116198] env[62974]: DEBUG nova.scheduler.client.report [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 612.342100] env[62974]: DEBUG oslo_concurrency.lockutils [req-d347792b-390a-4c5f-9c09-5c547003600f req-e081cc75-84b8-49e6-a300-36538f02f00f service nova] Releasing lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.414195] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653808, 'name': Rename_Task, 'duration_secs': 0.194867} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.414666] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.415073] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70f5922c-5cab-4585-b2e9-7d8399c1d236 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.421959] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 612.421959] env[62974]: value = "task-2653809" [ 612.421959] env[62974]: _type = "Task" [ 612.421959] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.432240] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653809, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.471999] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653807, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.620351] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.570s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.620887] env[62974]: DEBUG nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 612.623757] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.113s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.625810] env[62974]: INFO nova.compute.claims [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.897709] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "59ece0e8-85c2-499d-aba2-fd45fc116013" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.897981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.935151] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653809, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.977853] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653807, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.133195] env[62974]: DEBUG nova.compute.utils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.136852] env[62974]: DEBUG nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 613.137052] env[62974]: DEBUG nova.network.neutron [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 613.182634] env[62974]: DEBUG nova.policy [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85705a53f9314b08aed10199854f0d2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc2dc33e40e549d1a025e4b883c4dfb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 613.437024] env[62974]: DEBUG oslo_vmware.api [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2653809, 'name': PowerOnVM_Task, 'duration_secs': 0.697233} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.437024] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 613.437024] env[62974]: INFO nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Took 8.60 seconds to spawn the instance on the hypervisor. 
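The PowerOnVM_Task records above follow the same wait/poll pattern that recurs throughout this log (wait_for_task at oslo_vmware/api.py:397 driving _poll_task at api.py:434): the task reference is polled until it reports success or error, and the intermediate progress percentages are what produce the "progress is N%" lines. The following is a minimal illustrative sketch of that polling loop only, not oslo.vmware's actual implementation; fetch_task_info is a simulated, hypothetical stand-in for the vSphere TaskInfo lookup that oslo.vmware performs through its property collector.

import itertools
import time

# Simulated task progress: 0%, 25%, 89%, then complete (mirrors the log above).
_progress = itertools.chain([0, 25, 89], itertools.repeat(100))

def fetch_task_info(task_ref):
    # Hypothetical stand-in for reading vSphere TaskInfo for task_ref.
    pct = next(_progress)
    state = 'success' if pct >= 100 else 'running'
    return {'state': state, 'progress': pct}

def wait_for_task(task_ref, poll_interval=0.1):
    """Poll until the task succeeds or errors, logging progress along the way."""
    while True:
        info = fetch_task_info(task_ref)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed')
        print(f"Task {task_ref}: progress is {info['progress']}%.")
        time.sleep(poll_interval)

wait_for_task('task-2653809')  # prints 0%, 25%, 89%, then returns on success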
[ 613.437024] env[62974]: DEBUG nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.437024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c42100-1b39-4c19-bbb8-02e8f324dc0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.476106] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653807, 'name': CreateVM_Task, 'duration_secs': 1.855863} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.476106] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 613.477502] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.477502] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.477754] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 613.478037] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687eeb57-f5b8-4883-9561-cdcc7550f7d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.483166] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 613.483166] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea21f7-70e6-ccdb-383b-588f953f2ac0" [ 613.483166] env[62974]: _type = "Task" [ 613.483166] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.491851] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea21f7-70e6-ccdb-383b-588f953f2ac0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.515228] env[62974]: DEBUG nova.network.neutron [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Successfully created port: 30f39769-41ea-4d00-81eb-e86870ef4bae {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 613.638374] env[62974]: DEBUG nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 613.958731] env[62974]: INFO nova.compute.manager [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Took 37.43 seconds to build instance. [ 613.996356] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea21f7-70e6-ccdb-383b-588f953f2ac0, 'name': SearchDatastore_Task, 'duration_secs': 0.017999} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.996356] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.996573] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 613.996810] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.996956] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.997145] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.997400] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0b8c0f8-61b3-41b1-9185-1b12cc56e74e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.009332] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 614.009518] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 614.010269] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c63992a-8103-445b-9b56-b90c73fe9ec8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.018366] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 614.018366] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ca2f7-0245-d342-2a68-a8f9d14fc505" [ 614.018366] env[62974]: _type = "Task" [ 614.018366] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.026357] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ca2f7-0245-d342-2a68-a8f9d14fc505, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.106983] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf1d7fb-14e2-4f9b-97b3-b8de0e3a8e65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.115187] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2f563b-f413-4011-99c3-829c6b1c62a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.149644] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20381cbe-c1ee-42d8-9fd1-fa09fab7fa21 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.157321] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62ff3e7-36e4-4f73-b951-809186f7f01c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.170747] env[62974]: DEBUG nova.compute.provider_tree [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.460515] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7fdc9e5d-2531-4d68-b303-babd9c702e2e tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "05742180-08db-45db-9ee0-e359aa8af2f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.676s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.530077] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ca2f7-0245-d342-2a68-a8f9d14fc505, 'name': SearchDatastore_Task, 'duration_secs': 0.015522} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.530860] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cd42e4a-23c3-41b5-ac67-fce0196c8651 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.536521] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 614.536521] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52482332-d4cf-2734-3b85-611c55dfa78b" [ 614.536521] env[62974]: _type = "Task" [ 614.536521] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.544602] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52482332-d4cf-2734-3b85-611c55dfa78b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.650783] env[62974]: DEBUG nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 614.674226] env[62974]: DEBUG nova.scheduler.client.report [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 614.963096] env[62974]: DEBUG nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 614.987439] env[62974]: DEBUG nova.network.neutron [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Successfully updated port: 30f39769-41ea-4d00-81eb-e86870ef4bae {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 615.047229] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52482332-d4cf-2734-3b85-611c55dfa78b, 'name': SearchDatastore_Task, 'duration_secs': 0.019968} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.047432] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.047592] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 615.047940] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d77861e-b5e0-4f4b-98bc-6bdbf031c410 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.054734] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 615.054734] env[62974]: value = "task-2653810" [ 615.054734] env[62974]: _type = "Task" [ 615.054734] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.062800] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653810, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.179291] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.555s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.179751] env[62974]: DEBUG nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 615.182354] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.635s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.184063] env[62974]: INFO nova.compute.claims [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.486978] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.490939] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "refresh_cache-85f8f79d-330a-49cd-b1ae-8de20c70fcab" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.491139] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "refresh_cache-85f8f79d-330a-49cd-b1ae-8de20c70fcab" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.491272] env[62974]: DEBUG nova.network.neutron [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.567767] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653810, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.689255] env[62974]: DEBUG nova.compute.utils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.697872] env[62974]: DEBUG nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Not allocating networking since 'none' was specified. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 616.042275] env[62974]: DEBUG nova.network.neutron [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.068255] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653810, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578378} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.068533] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 616.068744] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 616.069027] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce5d78a4-d586-41fe-a67e-08e3086262df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.076307] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 616.076307] env[62974]: value = "task-2653811" [ 616.076307] env[62974]: _type = "Task" [ 616.076307] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.084631] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653811, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.194655] env[62974]: DEBUG nova.network.neutron [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Updating instance_info_cache with network_info: [{"id": "30f39769-41ea-4d00-81eb-e86870ef4bae", "address": "fa:16:3e:db:e4:9e", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f39769-41", "ovs_interfaceid": "30f39769-41ea-4d00-81eb-e86870ef4bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.198877] env[62974]: DEBUG nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 616.587729] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080382} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.587991] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 616.588772] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8915e0-33ef-4ab9-a223-f41337dff6be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.593337] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4aefd36-c7ee-4a64-b551-8c79da7c98af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.612423] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.612980] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e5a746a-99b3-4bd6-9709-6fd464d09061 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.630113] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdac3a75-57c8-4dd2-a43d-1af681fdbcf6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.634815] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 616.634815] env[62974]: value = "task-2653812" [ 616.634815] env[62974]: _type = "Task" [ 616.634815] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.665095] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611130cc-ef16-45e6-bd5f-065c096181a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.670997] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653812, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.675971] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd662c4-b519-4508-a3ec-b815ceaa7b65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.689621] env[62974]: DEBUG nova.compute.provider_tree [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.697122] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "refresh_cache-85f8f79d-330a-49cd-b1ae-8de20c70fcab" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.697301] env[62974]: DEBUG nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Instance network_info: |[{"id": "30f39769-41ea-4d00-81eb-e86870ef4bae", "address": "fa:16:3e:db:e4:9e", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f39769-41", "ovs_interfaceid": "30f39769-41ea-4d00-81eb-e86870ef4bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 617.145382] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653812, 'name': ReconfigVM_Task, 'duration_secs': 0.384468} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.145382] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 1873faa1-dec2-4d17-a71a-c53fea50c09b/1873faa1-dec2-4d17-a71a-c53fea50c09b.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 617.145840] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d34e5a4-f8bc-4801-bae6-883f6691c194 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.153275] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 617.153275] env[62974]: value = "task-2653813" [ 617.153275] env[62974]: _type = "Task" [ 617.153275] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.162058] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653813, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.193296] env[62974]: DEBUG nova.scheduler.client.report [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 617.210213] env[62974]: DEBUG nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 617.665061] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653813, 'name': Rename_Task, 'duration_secs': 0.17934} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.665061] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.665283] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bb3fbfb-2ea7-4c66-9533-da19b1680d0c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.672051] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 617.672051] env[62974]: value = "task-2653814" [ 617.672051] env[62974]: _type = "Task" [ 617.672051] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.681492] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653814, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.698622] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.699158] env[62974]: DEBUG nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 617.702071] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.118s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.703525] env[62974]: INFO nova.compute.claims [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.184103] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653814, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.207875] env[62974]: DEBUG nova.compute.utils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 618.212569] env[62974]: DEBUG nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 618.212909] env[62974]: DEBUG nova.network.neutron [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 618.256268] env[62974]: DEBUG nova.policy [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32c96e2ef0194f61b9a3f83fe73cd3f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca220df51dc0414ea400a56fe5e49e1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 618.401928] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 618.402176] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.403243] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image limits 0:0:0 {{(pid=62974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 618.403243] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.403243] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 618.403243] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 618.403243] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 618.404071] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 618.404071] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 618.404071] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 618.404071] env[62974]: DEBUG nova.virt.hardware [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 618.406213] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42135dca-10d8-492c-b1c5-cbd9e64545e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.414806] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381c04c8-2105-49ae-b18b-87eb4844c1b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.431711] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 
85f8f79d-330a-49cd-b1ae-8de20c70fcab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:e4:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30f39769-41ea-4d00-81eb-e86870ef4bae', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 618.440645] env[62974]: DEBUG oslo.service.loopingcall [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.445017] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 618.445017] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.445017] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 618.445017] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.445473] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 618.445473] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 618.445473] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a 
tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 618.445473] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 618.445473] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 618.445633] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 618.445633] env[62974]: DEBUG nova.virt.hardware [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 618.445633] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 618.448395] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e814632-baef-46a1-a2f2-488543e14b01 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.451751] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c870f5c-a8ce-496c-a10f-fad0ec6d0eeb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.475182] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5e5219-274e-4b73-afd6-29b83f5e262b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.478724] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 618.478724] env[62974]: value = "task-2653815" [ 618.478724] env[62974]: _type = "Task" [ 618.478724] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.491791] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 618.497651] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Creating folder: Project (9c181946fc3e46418388d34d27413345). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 618.498420] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89d58fd2-be12-4791-b2cc-f7f7e9806cb9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.503577] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653815, 'name': CreateVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.514034] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Created folder: Project (9c181946fc3e46418388d34d27413345) in parent group-v535199. [ 618.514034] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Creating folder: Instances. Parent ref: group-v535270. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 618.514034] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a64f75c5-a76b-47cb-97e4-9be720648c2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.523693] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Created folder: Instances in parent group-v535270. [ 618.524093] env[62974]: DEBUG oslo.service.loopingcall [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.524295] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 618.524715] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ac1b1d1-ff04-4633-93e7-8bed310f2745 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.544490] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 618.544490] env[62974]: value = "task-2653818" [ 618.544490] env[62974]: _type = "Task" [ 618.544490] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.553538] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653818, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.563270] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52490b64-d846-3875-26d1-48f775488b5f/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 618.563999] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6381d6a0-3d2d-4789-a519-4f176cc074c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.567769] env[62974]: DEBUG nova.network.neutron [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Successfully created port: e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 618.574509] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52490b64-d846-3875-26d1-48f775488b5f/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 618.574884] env[62974]: ERROR oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52490b64-d846-3875-26d1-48f775488b5f/disk-0.vmdk due to incomplete transfer. [ 618.575088] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0d8bc421-3a35-4fe5-9c59-550f054e2615 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.584072] env[62974]: DEBUG oslo_vmware.rw_handles [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52490b64-d846-3875-26d1-48f775488b5f/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 618.584255] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Uploaded image 36a29e07-a677-4f06-ba0b-14b8acfbed22 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 618.586613] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 618.586966] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f66bbc58-3f95-46b5-b241-35b4fd0fcf85 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.598141] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 618.598141] env[62974]: value = "task-2653819" [ 618.598141] env[62974]: _type = "Task" [ 618.598141] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.612262] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653819, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.686462] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653814, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.713821] env[62974]: DEBUG nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 618.990349] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653815, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.060447] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653818, 'name': CreateVM_Task, 'duration_secs': 0.351338} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.064909] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 619.065750] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.065977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.066353] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 619.066658] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45ccf39e-d8dd-4e5b-87a7-d41b877c6e9c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.071710] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 619.071710] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524ef863-ef34-ea57-7e38-b8c97b5445c1" [ 619.071710] env[62974]: _type = "Task" [ 619.071710] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.080774] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524ef863-ef34-ea57-7e38-b8c97b5445c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.122078] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653819, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.190251] env[62974]: DEBUG oslo_vmware.api [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653814, 'name': PowerOnVM_Task, 'duration_secs': 1.057995} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.190251] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.190412] env[62974]: DEBUG nova.compute.manager [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.191269] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5515410-d9d5-458b-ab61-87df5674ad15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.258607] env[62974]: DEBUG nova.compute.manager [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 619.258974] env[62974]: DEBUG nova.compute.manager [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing instance network info cache due to event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 619.259097] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] Acquiring lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.259518] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] Acquired lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.259518] env[62974]: DEBUG nova.network.neutron [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.313871] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a066e20-bcff-466f-bfa7-70949b5c33e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.322558] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1189fed3-a529-48bc-9da2-e7e59c2a546a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.355313] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf92ffb-4e0b-4d52-86da-90d4a7ce4572 
{{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.363187] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92a46f6-147e-4c6b-9727-17a35e4c2e80 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.377087] env[62974]: DEBUG nova.compute.provider_tree [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.489426] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653815, 'name': CreateVM_Task, 'duration_secs': 0.528525} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.489537] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 619.490213] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.583375] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524ef863-ef34-ea57-7e38-b8c97b5445c1, 'name': SearchDatastore_Task, 'duration_secs': 0.041842} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.584688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.584688] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 619.584688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.584688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.584995] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.584995] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.585101] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 619.585285] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80226fd1-1e3f-4de8-a372-ef41b4772b49 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.587468] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75dfce42-9bed-42c9-bee5-b8795379a288 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.592337] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 
tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 619.592337] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ec2a8-af1f-824f-5350-2cc020f0d2b5" [ 619.592337] env[62974]: _type = "Task" [ 619.592337] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.607709] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ec2a8-af1f-824f-5350-2cc020f0d2b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.608038] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.608214] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 619.609312] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43321f0a-3923-46b8-b0ae-c13f73ac94a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.614546] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653819, 'name': Destroy_Task, 'duration_secs': 0.573095} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.615181] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Destroyed the VM [ 619.615451] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 619.615704] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-746f1758-4b84-49cd-bbc1-182bface87df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.618499] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 619.618499] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52089f0f-1f2c-20be-2391-66080e6bd3f3" [ 619.618499] env[62974]: _type = "Task" [ 619.618499] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.625013] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 619.625013] env[62974]: value = "task-2653821" [ 619.625013] env[62974]: _type = "Task" [ 619.625013] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.627035] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52089f0f-1f2c-20be-2391-66080e6bd3f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.634466] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653821, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.718912] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.727137] env[62974]: DEBUG nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 619.755426] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.755716] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.755818] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.755988] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.756148] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.756302] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.756598] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.756855] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 619.757051] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.757220] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.757395] env[62974]: DEBUG nova.virt.hardware [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.758453] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a396677b-1ad0-4710-90c6-17bed917f4c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.771723] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc53cb3f-9543-46de-a3b8-c1cfd1c25eb4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.880063] env[62974]: DEBUG nova.scheduler.client.report [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.050012] env[62974]: DEBUG nova.network.neutron [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updated VIF entry in instance network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.050138] env[62974]: DEBUG nova.network.neutron [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.104417] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ec2a8-af1f-824f-5350-2cc020f0d2b5, 'name': SearchDatastore_Task, 'duration_secs': 0.048794} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.107126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.107126] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.107126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.129399] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52089f0f-1f2c-20be-2391-66080e6bd3f3, 'name': SearchDatastore_Task, 'duration_secs': 0.048591} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.132276] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9938da88-f3b5-4638-af81-1501d2341cdb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.139831] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653821, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.141163] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 620.141163] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dfe859-47ae-cc7b-a0f2-db6baf147dbf" [ 620.141163] env[62974]: _type = "Task" [ 620.141163] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.151922] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dfe859-47ae-cc7b-a0f2-db6baf147dbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.168524] env[62974]: DEBUG nova.compute.manager [req-e296ea9b-89d7-465a-a73f-d687fec981d3 req-47cec3a5-31a6-4f6d-aa5f-167064758c7a service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Received event network-vif-plugged-30f39769-41ea-4d00-81eb-e86870ef4bae {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 620.168968] env[62974]: DEBUG oslo_concurrency.lockutils [req-e296ea9b-89d7-465a-a73f-d687fec981d3 req-47cec3a5-31a6-4f6d-aa5f-167064758c7a service nova] Acquiring lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.168968] env[62974]: DEBUG oslo_concurrency.lockutils [req-e296ea9b-89d7-465a-a73f-d687fec981d3 req-47cec3a5-31a6-4f6d-aa5f-167064758c7a service nova] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.169152] env[62974]: DEBUG oslo_concurrency.lockutils [req-e296ea9b-89d7-465a-a73f-d687fec981d3 req-47cec3a5-31a6-4f6d-aa5f-167064758c7a service nova] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.169328] env[62974]: DEBUG nova.compute.manager [req-e296ea9b-89d7-465a-a73f-d687fec981d3 req-47cec3a5-31a6-4f6d-aa5f-167064758c7a service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] No waiting events found dispatching network-vif-plugged-30f39769-41ea-4d00-81eb-e86870ef4bae {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 620.169483] env[62974]: WARNING nova.compute.manager [req-e296ea9b-89d7-465a-a73f-d687fec981d3 req-47cec3a5-31a6-4f6d-aa5f-167064758c7a service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Received unexpected event network-vif-plugged-30f39769-41ea-4d00-81eb-e86870ef4bae for instance with vm_state building and task_state spawning. [ 620.224552] env[62974]: DEBUG nova.network.neutron [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Successfully updated port: e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 620.385136] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.683s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.385674] env[62974]: DEBUG nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 620.392654] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 23.727s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.449205] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.449587] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.553695] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5dffab7-8259-4252-ba31-ee89f997175a req-bf228f6b-d7b9-4cc4-9d10-131d7649a27a service nova] Releasing lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.639843] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653821, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.654030] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dfe859-47ae-cc7b-a0f2-db6baf147dbf, 'name': SearchDatastore_Task, 'duration_secs': 0.034057} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.654030] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.654030] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 620.654030] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.654318] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.654318] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c1eab31-26b4-49a1-8c7e-dcb841c941ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.655528] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af488405-18eb-4d7e-97f6-b819369020f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.667021] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 620.667021] env[62974]: value = "task-2653822" [ 620.667021] env[62974]: _type = "Task" [ 620.667021] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.671860] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.673352] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.674055] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb3b7175-cc58-495f-9c8e-e356c9f4909e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.681751] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653822, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.685923] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 620.685923] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b06b93-1a0f-84f6-7a15-fb1c3f329057" [ 620.685923] env[62974]: _type = "Task" [ 620.685923] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.696528] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b06b93-1a0f-84f6-7a15-fb1c3f329057, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.727454] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.727454] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.727454] env[62974]: DEBUG nova.network.neutron [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.894360] env[62974]: DEBUG nova.compute.utils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.899025] env[62974]: DEBUG nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.899204] env[62974]: DEBUG nova.network.neutron [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.967042] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.967042] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 620.967042] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Rebuilding the list of instances to heal {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 620.991036] env[62974]: DEBUG nova.policy [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5cd46426fe34c339d110096e7e77a8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '763d0eb3465144309bd666f6586a9ca6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 621.052982] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5207ab86-5a1b-882e-b826-c71be030e40b/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 621.056982] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a979bd1d-31cf-4ee3-a3de-5789d034b307 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.064870] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5207ab86-5a1b-882e-b826-c71be030e40b/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 621.065084] env[62974]: ERROR oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5207ab86-5a1b-882e-b826-c71be030e40b/disk-0.vmdk due to incomplete transfer. 
[ 621.065368] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-79a6bb5e-e363-4661-8be1-8d8bcaddcee2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.072864] env[62974]: DEBUG oslo_vmware.rw_handles [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5207ab86-5a1b-882e-b826-c71be030e40b/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 621.073113] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Uploaded image 85f7c315-f08a-46b5-8e04-9a7ff332ae5b to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 621.074859] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 621.078176] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7b6e8fcf-d182-4dec-83bb-4a340a5a28bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.087457] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 621.087457] env[62974]: value = "task-2653823" [ 621.087457] env[62974]: _type = "Task" [ 621.087457] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.100034] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653823, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.147026] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653821, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.180621] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653822, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.200158] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b06b93-1a0f-84f6-7a15-fb1c3f329057, 'name': SearchDatastore_Task, 'duration_secs': 0.0113} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.205609] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37a3f025-968c-4335-95b9-cbc09d49e4ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.210738] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 621.210738] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528342c6-e3ec-2e65-57d2-332fb2184b6a" [ 621.210738] env[62974]: _type = "Task" [ 621.210738] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.220402] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528342c6-e3ec-2e65-57d2-332fb2184b6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.285880] env[62974]: DEBUG nova.network.neutron [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.403562] env[62974]: DEBUG nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 621.451124] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cb3052-1520-4d02-95a4-0682fb489d53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.459904] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab906878-6bca-4b10-af60-bca98201ba62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.505011] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Skipping network cache update for instance because it is Building. 
{{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 621.505144] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 621.505271] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 621.505397] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 621.510442] env[62974]: DEBUG nova.network.neutron [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updating instance_info_cache with network_info: [{"id": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "address": "fa:16:3e:be:27:61", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4309fab-6f", "ovs_interfaceid": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.511832] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa00d31-d105-4586-90dc-d891067f4bb2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.519306] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "c763d45b-44f0-4557-a726-7aad2bc58ba8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.519552] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 
tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.525244] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72689347-ed9d-45da-a059-7bb98407fb12 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.542273] env[62974]: DEBUG nova.compute.provider_tree [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.544586] env[62974]: DEBUG nova.network.neutron [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Successfully created port: 22854a7f-ed93-414f-9a4b-b5b486459cc8 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.548308] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.548449] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.548586] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 621.548737] env[62974]: DEBUG nova.objects.instance [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lazy-loading 'info_cache' on Instance uuid d8b7a39f-ec73-4a87-9b1e-9428ca72f895 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 621.597617] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653823, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.638968] env[62974]: DEBUG oslo_vmware.api [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653821, 'name': RemoveSnapshot_Task, 'duration_secs': 1.593937} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.640531] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 621.640794] env[62974]: INFO nova.compute.manager [None req-d8cd0a3c-0562-452e-8ac2-d4e87a753256 tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Took 17.50 seconds to snapshot the instance on the hypervisor. [ 621.680252] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653822, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540627} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.680519] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 621.680729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 621.681230] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb8f7048-2251-4b83-8bb4-f3d153b94edc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.689838] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 621.689838] env[62974]: value = "task-2653825" [ 621.689838] env[62974]: _type = "Task" [ 621.689838] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.697697] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653825, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.723467] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528342c6-e3ec-2e65-57d2-332fb2184b6a, 'name': SearchDatastore_Task, 'duration_secs': 0.014755} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.723467] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.723853] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 85f8f79d-330a-49cd-b1ae-8de20c70fcab/85f8f79d-330a-49cd-b1ae-8de20c70fcab.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 621.724231] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be32147d-3fb1-46fe-ba7f-5886994a1856 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.733202] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 621.733202] env[62974]: value = "task-2653826" [ 621.733202] env[62974]: _type = "Task" [ 621.733202] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.745137] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653826, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.759578] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "ecde0e49-c344-4003-b858-8312c1ac344f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.759828] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "ecde0e49-c344-4003-b858-8312c1ac344f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.760097] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "ecde0e49-c344-4003-b858-8312c1ac344f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.760239] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "ecde0e49-c344-4003-b858-8312c1ac344f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.760405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "ecde0e49-c344-4003-b858-8312c1ac344f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.764674] env[62974]: INFO nova.compute.manager [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Terminating instance [ 622.007178] env[62974]: DEBUG nova.compute.manager [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 622.007418] env[62974]: DEBUG nova.compute.manager [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing instance network info cache due to event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 622.007601] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] Acquiring lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.007709] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] Acquired lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.007958] env[62974]: DEBUG nova.network.neutron [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.021963] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Releasing lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.022295] env[62974]: DEBUG nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Instance network_info: |[{"id": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "address": "fa:16:3e:be:27:61", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4309fab-6f", "ovs_interfaceid": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 622.022911] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:27:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88eedc4b-66dc-4845-9f95-858d6db12a7f', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'e4309fab-6f6a-4cb4-8401-082b264bf2b9', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.031861] env[62974]: DEBUG oslo.service.loopingcall [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.032495] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 622.032726] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81696e08-8376-4233-9772-8e31dbef8354 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.048219] env[62974]: DEBUG nova.scheduler.client.report [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 622.058423] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.058423] env[62974]: value = "task-2653827" [ 622.058423] env[62974]: _type = "Task" [ 622.058423] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.067748] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653827, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.099230] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653823, 'name': Destroy_Task, 'duration_secs': 0.693698} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.099519] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Destroyed the VM [ 622.099754] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 622.099996] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9cc025de-26cf-45fa-a977-2c515e470a36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.105982] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 622.105982] env[62974]: value = "task-2653828" [ 622.105982] env[62974]: _type = "Task" [ 622.105982] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.113584] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653828, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.199597] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065075} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.199862] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 622.201310] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c3cab7-c035-436b-8ab7-bccf5684e094 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.222988] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 622.223322] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b27c57a-1e39-4a1a-8f3d-e60c3206b4e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.252216] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.253322] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 622.253322] env[62974]: value = "task-2653829" [ 622.253322] env[62974]: _type = "Task" [ 622.253322] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.260894] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653829, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.267750] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "1873faa1-dec2-4d17-a71a-c53fea50c09b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.268386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.268386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "1873faa1-dec2-4d17-a71a-c53fea50c09b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.268386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.268637] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.270718] env[62974]: DEBUG nova.compute.manager [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.270921] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.271445] env[62974]: INFO nova.compute.manager [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Terminating instance [ 622.273406] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f88e081-6aef-4ad9-8879-97101818d201 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.285975] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 622.285975] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd200e18-8788-4311-8161-0dd57475f1be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.418075] env[62974]: DEBUG nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 622.454564] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 622.454871] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.455080] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 622.455286] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.455483] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 622.455646] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 622.455928] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 622.456114] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 622.456300] 
env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 622.456513] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 622.456692] env[62974]: DEBUG nova.virt.hardware [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 622.457647] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcc5d67-3b64-444d-af5a-e34278540453 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.466460] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eade1246-8ce5-4630-9288-f19ea07bf537 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.571443] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653827, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.618937] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653828, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.749307] env[62974]: DEBUG nova.network.neutron [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updated VIF entry in instance network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 622.749668] env[62974]: DEBUG nova.network.neutron [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.758916] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653826, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.766275] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653829, 'name': ReconfigVM_Task, 'duration_secs': 0.423779} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.766537] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.767193] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51ce4600-8b88-42b4-a6ce-a90c5ea3ed44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.773343] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 622.773343] env[62974]: value = "task-2653831" [ 622.773343] env[62974]: _type = "Task" [ 622.773343] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.782233] env[62974]: DEBUG nova.compute.manager [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.782465] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.782744] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653831, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.783773] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b93ec8b-f964-4ac7-bd5e-7bf4df974644 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.790510] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.790792] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d689a2f-f562-4a15-8e49-4433d91406f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.796658] env[62974]: DEBUG oslo_vmware.api [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 622.796658] env[62974]: value = "task-2653832" [ 622.796658] env[62974]: _type = "Task" [ 622.796658] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.807234] env[62974]: DEBUG oslo_vmware.api [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653832, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.062239] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.673s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.065556] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.118s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.067071] env[62974]: INFO nova.compute.claims [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.076117] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Received event network-changed-30f39769-41ea-4d00-81eb-e86870ef4bae {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 623.076316] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Refreshing instance network info cache due to event network-changed-30f39769-41ea-4d00-81eb-e86870ef4bae. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 623.076531] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Acquiring lock "refresh_cache-85f8f79d-330a-49cd-b1ae-8de20c70fcab" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.076672] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Acquired lock "refresh_cache-85f8f79d-330a-49cd-b1ae-8de20c70fcab" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.077041] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Refreshing network info cache for port 30f39769-41ea-4d00-81eb-e86870ef4bae {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 623.084399] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653827, 'name': CreateVM_Task, 'duration_secs': 0.867324} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.085525] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.085525] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.085525] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.085707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 623.085955] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-738c27a8-4a99-4aba-bfa9-63b19b89d203 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.093121] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 623.093121] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb1c03-071b-8f6f-da07-58a2e5434bd3" [ 623.093121] env[62974]: _type = "Task" [ 623.093121] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.101195] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb1c03-071b-8f6f-da07-58a2e5434bd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.120851] env[62974]: DEBUG oslo_vmware.api [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653828, 'name': RemoveSnapshot_Task, 'duration_secs': 0.859714} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.121319] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 623.121541] env[62974]: INFO nova.compute.manager [None req-3b47fac1-ba2f-481f-adcd-619cd77534c2 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Took 16.10 seconds to snapshot the instance on the hypervisor. [ 623.256217] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5c10327-de2b-4a96-b6b9-5ee3d00d07cf req-6c25d56a-b0eb-4a1e-b53f-07a1ccb61152 service nova] Releasing lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.256529] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653826, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.029532} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.256785] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 85f8f79d-330a-49cd-b1ae-8de20c70fcab/85f8f79d-330a-49cd-b1ae-8de20c70fcab.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.257182] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.257269] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90ad8727-c61a-48f5-8a44-fdcf06a898bc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.264342] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 623.264342] env[62974]: value = "task-2653833" [ 623.264342] env[62974]: _type = "Task" [ 623.264342] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.272620] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.281751] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653831, 'name': Rename_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.310874] env[62974]: DEBUG oslo_vmware.api [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653832, 'name': PowerOffVM_Task, 'duration_secs': 0.206851} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.311411] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.311696] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.312050] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f004ad7-63f9-430f-8bcf-91187d0e97db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.329296] env[62974]: DEBUG nova.network.neutron [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Successfully updated port: 22854a7f-ed93-414f-9a4b-b5b486459cc8 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.368576] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.369237] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.369237] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Deleting the datastore file [datastore1] ecde0e49-c344-4003-b858-8312c1ac344f {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.369413] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-041e18d8-e901-4a4e-bfe3-7c67d5b21c47 {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.375940] env[62974]: DEBUG oslo_vmware.api [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 623.375940] env[62974]: value = "task-2653836" [ 623.375940] env[62974]: _type = "Task" [ 623.375940] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.385289] env[62974]: DEBUG oslo_vmware.api [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.386278] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.386465] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.386666] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleting the datastore file [datastore2] 1873faa1-dec2-4d17-a71a-c53fea50c09b {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.387009] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa233aec-c356-4223-a4a6-ca4fe3aceb46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.389912] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [{"id": "947659a6-f0ce-4065-a591-6a15666e4ac5", "address": "fa:16:3e:f1:cd:d9", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947659a6-f0", "ovs_interfaceid": "947659a6-f0ce-4065-a591-6a15666e4ac5", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.396086] env[62974]: DEBUG oslo_vmware.api [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 623.396086] env[62974]: value = "task-2653837" [ 623.396086] env[62974]: _type = "Task" [ 623.396086] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.403923] env[62974]: DEBUG oslo_vmware.api [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.515226] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "669cd72c-556f-40b6-8bc2-f50a125c182a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.515460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.605955] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb1c03-071b-8f6f-da07-58a2e5434bd3, 'name': SearchDatastore_Task, 'duration_secs': 0.009292} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.606430] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.606694] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.606956] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.607286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.607351] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.607565] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c678d79a-e69a-4d9b-a524-bd403a0f6aa3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.618394] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.618682] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.621991] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49890756-10f1-4266-80c0-e5f0c76679cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.628413] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 623.628413] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f96164-8cb8-e725-76a3-cb898d34241d" [ 623.628413] env[62974]: _type = "Task" [ 623.628413] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.648018] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f96164-8cb8-e725-76a3-cb898d34241d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.654822] env[62974]: INFO nova.scheduler.client.report [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleted allocation for migration d84a9086-86d7-445d-b99f-b1d247f1cb7c [ 623.776106] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070846} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.779594] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 623.781163] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eff09c5-6c71-4eaa-a4a0-c4e3c344807c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.790424] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653831, 'name': Rename_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.811212] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 85f8f79d-330a-49cd-b1ae-8de20c70fcab/85f8f79d-330a-49cd-b1ae-8de20c70fcab.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 623.811549] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6968f0a6-c681-4db9-bd67-203769e7757e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.832916] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.835561] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquired lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.835561] env[62974]: DEBUG nova.network.neutron [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.839966] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 623.839966] env[62974]: value = "task-2653838" [ 623.839966] env[62974]: _type = "Task" [ 623.839966] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.857168] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653838, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.887719] env[62974]: DEBUG oslo_vmware.api [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196984} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.888056] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.888219] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 623.888418] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.888592] env[62974]: INFO nova.compute.manager [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Took 1.62 seconds to destroy the instance on the hypervisor. [ 623.888831] env[62974]: DEBUG oslo.service.loopingcall [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 623.889068] env[62974]: DEBUG nova.compute.manager [-] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 623.889213] env[62974]: DEBUG nova.network.neutron [-] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.891834] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-d8b7a39f-ec73-4a87-9b1e-9428ca72f895" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.892442] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 623.892788] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.893302] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.893679] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.894137] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.894966] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.895415] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.895613] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 623.895811] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.908241] env[62974]: DEBUG oslo_vmware.api [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2653837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174416} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.911122] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.911122] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 623.911122] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.911122] env[62974]: INFO nova.compute.manager [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 623.911122] env[62974]: DEBUG oslo.service.loopingcall [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 623.911353] env[62974]: DEBUG nova.compute.manager [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 623.911353] env[62974]: DEBUG nova.network.neutron [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.960628] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Updated VIF entry in instance network info cache for port 30f39769-41ea-4d00-81eb-e86870ef4bae. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 623.961009] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Updating instance_info_cache with network_info: [{"id": "30f39769-41ea-4d00-81eb-e86870ef4bae", "address": "fa:16:3e:db:e4:9e", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f39769-41", "ovs_interfaceid": "30f39769-41ea-4d00-81eb-e86870ef4bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.152636] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f96164-8cb8-e725-76a3-cb898d34241d, 'name': SearchDatastore_Task, 'duration_secs': 0.020421} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.153457] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37c9213e-f34e-4967-9cfb-a28ca03b50ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.159157] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 624.159157] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b5f597-84a7-171c-c3c3-d7d4876b27d9" [ 624.159157] env[62974]: _type = "Task" [ 624.159157] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.162576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05317192-6d6c-41e1-a2f7-a27f3bc27c75 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 31.617s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.173443] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b5f597-84a7-171c-c3c3-d7d4876b27d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009109} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.173760] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.174269] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70/b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 624.174378] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d557d101-6e83-472d-a940-cf72480c02b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.180625] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 624.180625] env[62974]: value = "task-2653839" [ 624.180625] env[62974]: _type = "Task" [ 624.180625] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.193633] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653839, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.285858] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653831, 'name': Rename_Task, 'duration_secs': 1.149185} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.286505] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.287062] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2403c92d-8939-4389-b610-7f02f89f02cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.294850] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 624.294850] env[62974]: value = "task-2653840" [ 624.294850] env[62974]: _type = "Task" [ 624.294850] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.306237] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653840, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.351824] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "6dc914e9-bce5-4a19-a919-ae94981ea800" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.351954] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.352146] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "6dc914e9-bce5-4a19-a919-ae94981ea800-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.352977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.352977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.358828] env[62974]: INFO nova.compute.manager [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Terminating instance [ 624.369035] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653838, 'name': ReconfigVM_Task, 'duration_secs': 0.283926} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.369349] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 85f8f79d-330a-49cd-b1ae-8de20c70fcab/85f8f79d-330a-49cd-b1ae-8de20c70fcab.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 624.369987] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41b5e4ed-70b3-4ccc-9900-23684f510094 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.384908] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 624.384908] env[62974]: value = "task-2653841" [ 624.384908] env[62974]: _type = "Task" [ 624.384908] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.390991] env[62974]: DEBUG nova.network.neutron [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.400246] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653841, 'name': Rename_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.403716] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.428640] env[62974]: DEBUG nova.compute.manager [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 624.428818] env[62974]: DEBUG nova.compute.manager [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing instance network info cache due to event network-changed-b8fd7c55-6daa-4314-8b00-89aea7879581. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 624.429400] env[62974]: DEBUG oslo_concurrency.lockutils [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] Acquiring lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.429400] env[62974]: DEBUG oslo_concurrency.lockutils [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] Acquired lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.429400] env[62974]: DEBUG nova.network.neutron [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Refreshing network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 624.456460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "586a3541-060f-4859-8507-17faa637b17e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.457011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "586a3541-060f-4859-8507-17faa637b17e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.457468] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "586a3541-060f-4859-8507-17faa637b17e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.457542] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "586a3541-060f-4859-8507-17faa637b17e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.458598] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "586a3541-060f-4859-8507-17faa637b17e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.463521] env[62974]: INFO nova.compute.manager [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Terminating instance [ 624.465916] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Releasing lock "refresh_cache-85f8f79d-330a-49cd-b1ae-8de20c70fcab" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.467322] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Received event network-vif-plugged-e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 624.467322] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Acquiring lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.467322] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.467322] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.467322] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] No waiting events found dispatching network-vif-plugged-e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 624.467470] env[62974]: WARNING nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Received unexpected event network-vif-plugged-e4309fab-6f6a-4cb4-8401-082b264bf2b9 for instance with vm_state building and task_state spawning. [ 624.467470] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Received event network-changed-e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 624.467470] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Refreshing instance network info cache due to event network-changed-e4309fab-6f6a-4cb4-8401-082b264bf2b9. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 624.467557] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Acquiring lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.467636] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Acquired lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.467835] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Refreshing network info cache for port e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 624.566295] env[62974]: DEBUG nova.network.neutron [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Updating instance_info_cache with network_info: [{"id": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "address": "fa:16:3e:e5:8b:fe", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22854a7f-ed", "ovs_interfaceid": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.659897] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e4dbd1-8f53-440c-ac0d-141019e7c68d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.672370] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4d3686-770c-4697-a62c-efb2e450f447 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.708801] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084dd443-f608-4876-9179-c8dcc5354adf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.716285] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519199} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.718358] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70/b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 624.718601] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.718902] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0681ab3a-c108-4a02-aee4-f5bcec9a603d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.721605] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08ee7e6-1fc1-4336-b315-0d28a83d874c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.735706] env[62974]: DEBUG nova.compute.provider_tree [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.739484] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 624.739484] env[62974]: value = 
"task-2653842" [ 624.739484] env[62974]: _type = "Task" [ 624.739484] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.747616] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653842, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.804255] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653840, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.827276] env[62974]: DEBUG nova.network.neutron [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.859051] env[62974]: DEBUG nova.network.neutron [-] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.865470] env[62974]: DEBUG nova.compute.manager [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 624.865729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.866654] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7c7195-b203-43ef-9dd8-a81dba0b32d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.875640] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 624.875831] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df3f85a9-3768-4e02-b528-7708ac967cb9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.880085] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "7f0d367d-9d60-414b-990e-56a2b43fd963" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.880352] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.880553] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "7f0d367d-9d60-414b-990e-56a2b43fd963-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.880736] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.880954] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.883107] env[62974]: INFO nova.compute.manager [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Terminating instance [ 624.897220] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653841, 'name': Rename_Task, 'duration_secs': 0.400371} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.898145] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.898411] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3d50aff-2d57-457c-930f-dc6945c6f915 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.904722] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 624.904722] env[62974]: value = "task-2653844" [ 624.904722] env[62974]: _type = "Task" [ 624.904722] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.912477] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653844, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.942219] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 624.942219] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 624.942219] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleting the datastore file [datastore1] 6dc914e9-bce5-4a19-a919-ae94981ea800 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.942714] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77e06303-4631-4fe4-852b-51ea92ffec1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.948946] env[62974]: DEBUG oslo_vmware.api [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 624.948946] env[62974]: value = "task-2653845" [ 624.948946] env[62974]: _type = "Task" [ 624.948946] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.957809] env[62974]: DEBUG oslo_vmware.api [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.973615] env[62974]: DEBUG nova.compute.manager [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 624.973875] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.974828] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac12ee4-7a38-486d-9d68-fcda4a2ae4f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.982829] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 624.983079] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c98e73c-56d6-4348-9021-29dfc0b9be43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.989617] env[62974]: DEBUG oslo_vmware.api [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 624.989617] env[62974]: value = "task-2653846" [ 624.989617] env[62974]: _type = "Task" [ 624.989617] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.998410] env[62974]: DEBUG oslo_vmware.api [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653846, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.052027] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "d941a678-1b67-4e0f-8806-e6682ef21774" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.052365] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "d941a678-1b67-4e0f-8806-e6682ef21774" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.070335] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Releasing lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.070335] env[62974]: DEBUG nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Instance network_info: |[{"id": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "address": "fa:16:3e:e5:8b:fe", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22854a7f-ed", "ovs_interfaceid": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 625.070590] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:8b:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22854a7f-ed93-414f-9a4b-b5b486459cc8', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.082828] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Creating folder: Project (763d0eb3465144309bd666f6586a9ca6). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.088940] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-806df38b-96e4-4b2f-96f4-d4c2c55afe6a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.102370] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Created folder: Project (763d0eb3465144309bd666f6586a9ca6) in parent group-v535199. [ 625.102653] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Creating folder: Instances. Parent ref: group-v535274. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.102966] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb8eca15-7528-4bc4-906e-524066feffd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.115488] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Created folder: Instances in parent group-v535274. [ 625.115868] env[62974]: DEBUG oslo.service.loopingcall [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.116113] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 625.116403] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ff3f4ae-fe0f-4659-b849-557f67c69140 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.151298] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.151298] env[62974]: value = "task-2653849" [ 625.151298] env[62974]: _type = "Task" [ 625.151298] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.160740] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653849, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.238498] env[62974]: DEBUG nova.scheduler.client.report [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 625.254082] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108125} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.256653] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.258506] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf61cf7-3acd-4248-85b7-9be397e2f69b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.280409] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70/b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.281998] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49b878f3-86f1-450e-8936-5d28eefa7041 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.298020] env[62974]: DEBUG nova.compute.manager [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Received event network-vif-plugged-22854a7f-ed93-414f-9a4b-b5b486459cc8 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 625.298228] env[62974]: DEBUG oslo_concurrency.lockutils [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] Acquiring lock "22a0a34a-c46b-4246-9a80-3540550bd793-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.298428] env[62974]: DEBUG oslo_concurrency.lockutils 
[req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] Lock "22a0a34a-c46b-4246-9a80-3540550bd793-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.298617] env[62974]: DEBUG oslo_concurrency.lockutils [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] Lock "22a0a34a-c46b-4246-9a80-3540550bd793-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.298804] env[62974]: DEBUG nova.compute.manager [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] No waiting events found dispatching network-vif-plugged-22854a7f-ed93-414f-9a4b-b5b486459cc8 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 625.298967] env[62974]: WARNING nova.compute.manager [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Received unexpected event network-vif-plugged-22854a7f-ed93-414f-9a4b-b5b486459cc8 for instance with vm_state building and task_state spawning. [ 625.299138] env[62974]: DEBUG nova.compute.manager [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Received event network-changed-22854a7f-ed93-414f-9a4b-b5b486459cc8 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 625.299280] env[62974]: DEBUG nova.compute.manager [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Refreshing instance network info cache due to event network-changed-22854a7f-ed93-414f-9a4b-b5b486459cc8. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 625.299454] env[62974]: DEBUG oslo_concurrency.lockutils [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] Acquiring lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.299582] env[62974]: DEBUG oslo_concurrency.lockutils [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] Acquired lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.299724] env[62974]: DEBUG nova.network.neutron [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Refreshing network info cache for port 22854a7f-ed93-414f-9a4b-b5b486459cc8 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.301498] env[62974]: DEBUG nova.network.neutron [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updated VIF entry in instance network info cache for port b8fd7c55-6daa-4314-8b00-89aea7879581. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 625.301811] env[62974]: DEBUG nova.network.neutron [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [{"id": "b8fd7c55-6daa-4314-8b00-89aea7879581", "address": "fa:16:3e:7d:39:b3", "network": {"id": "e86335e7-13c3-4fa3-beaa-0d85df4b33ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1500727308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "57827584df3c485ca936672aebb4c992", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8fd7c55-6d", "ovs_interfaceid": "b8fd7c55-6daa-4314-8b00-89aea7879581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.309657] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updated VIF entry in instance network info cache for port e4309fab-6f6a-4cb4-8401-082b264bf2b9. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 625.310072] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updating instance_info_cache with network_info: [{"id": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "address": "fa:16:3e:be:27:61", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4309fab-6f", "ovs_interfaceid": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.313607] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 625.313607] env[62974]: value = "task-2653850" [ 625.313607] env[62974]: _type = "Task" [ 625.313607] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.321777] env[62974]: DEBUG oslo_vmware.api [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653840, 'name': PowerOnVM_Task, 'duration_secs': 0.756805} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.322548] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 625.322675] env[62974]: INFO nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Took 8.11 seconds to spawn the instance on the hypervisor. 
[ 625.322884] env[62974]: DEBUG nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 625.323926] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2247007a-d08f-438c-aac1-81502eeb6ab6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.331769] env[62974]: INFO nova.compute.manager [-] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Took 1.42 seconds to deallocate network for instance. [ 625.332165] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653850, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.361957] env[62974]: INFO nova.compute.manager [-] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Took 1.47 seconds to deallocate network for instance. [ 625.387118] env[62974]: DEBUG nova.compute.manager [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 625.387363] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 625.388916] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0482a69a-134c-47d9-b18e-0b02204eaf84 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.397289] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 625.397553] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f1da2fe-a0ca-4202-b158-a2ef4d46a458 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.403587] env[62974]: DEBUG oslo_vmware.api [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 625.403587] env[62974]: value = "task-2653851" [ 625.403587] env[62974]: _type = "Task" [ 625.403587] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.416179] env[62974]: DEBUG oslo_vmware.api [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653851, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.420255] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653844, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.460220] env[62974]: DEBUG oslo_vmware.api [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2653845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247394} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.460504] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 625.460715] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 625.460918] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.461126] env[62974]: INFO nova.compute.manager [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Took 0.60 seconds to destroy the instance on the hypervisor. [ 625.463584] env[62974]: DEBUG oslo.service.loopingcall [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.463584] env[62974]: DEBUG nova.compute.manager [-] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 625.463584] env[62974]: DEBUG nova.network.neutron [-] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.500939] env[62974]: DEBUG oslo_vmware.api [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653846, 'name': PowerOffVM_Task, 'duration_secs': 0.438189} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.501276] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 625.501462] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 625.501717] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b0d63b1-c752-4785-8d45-6f155c449030 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.661993] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653849, 'name': CreateVM_Task, 'duration_secs': 0.420699} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.662892] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.663037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.664189] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.664189] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 625.664189] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d319f9d0-61c8-47e1-8c8d-690c544106cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.668971] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 625.668971] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5268eb13-2c03-2ee6-276d-2292713ac3b9" [ 625.668971] env[62974]: _type = "Task" [ 625.668971] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.678869] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5268eb13-2c03-2ee6-276d-2292713ac3b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.752277] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.752277] env[62974]: DEBUG nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 625.753522] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.232s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.755088] env[62974]: INFO nova.compute.claims [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.811392] env[62974]: DEBUG oslo_concurrency.lockutils [req-6d1c04b4-56af-42b4-89ba-7549150fb743 req-a9f1367d-855d-4803-b67a-51a9391e02e7 service nova] Releasing lock "refresh_cache-586a3541-060f-4859-8507-17faa637b17e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.814759] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Releasing lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.815095] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Received event network-changed-39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 625.815251] env[62974]: DEBUG nova.compute.manager [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Refreshing instance network info cache due to event network-changed-39515e98-a8f4-4af9-9948-b0a5d05d3188. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 625.815458] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Acquiring lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.815602] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Acquired lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.815793] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Refreshing network info cache for port 39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.834338] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653850, 'name': ReconfigVM_Task, 'duration_secs': 0.399227} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.834791] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Reconfigured VM instance instance-00000018 to attach disk [datastore2] b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70/b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.835692] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb9966ec-5f17-4657-9dd6-39f0981e92fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.847267] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.850466] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 625.850466] env[62974]: value = "task-2653854" [ 625.850466] env[62974]: _type = "Task" [ 625.850466] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.850994] env[62974]: INFO nova.compute.manager [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Took 45.36 seconds to build instance. 
[ 625.865940] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653854, 'name': Rename_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.871714] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.921989] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653844, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.925522] env[62974]: DEBUG oslo_vmware.api [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653851, 'name': PowerOffVM_Task, 'duration_secs': 0.325152} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.925939] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 625.926188] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 625.926496] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-792c0544-2cef-4a25-84a0-7828a55b073e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.015278] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 626.015510] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 626.015694] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Deleting the datastore file [datastore2] 
7f0d367d-9d60-414b-990e-56a2b43fd963 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 626.015960] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf94126d-d156-4057-88ce-f6e24160268a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.025087] env[62974]: DEBUG oslo_vmware.api [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for the task: (returnval){ [ 626.025087] env[62974]: value = "task-2653856" [ 626.025087] env[62974]: _type = "Task" [ 626.025087] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.041021] env[62974]: DEBUG oslo_vmware.api [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653856, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.159654] env[62974]: DEBUG nova.network.neutron [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Updated VIF entry in instance network info cache for port 22854a7f-ed93-414f-9a4b-b5b486459cc8. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.160186] env[62974]: DEBUG nova.network.neutron [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Updating instance_info_cache with network_info: [{"id": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "address": "fa:16:3e:e5:8b:fe", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22854a7f-ed", "ovs_interfaceid": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.187770] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5268eb13-2c03-2ee6-276d-2292713ac3b9, 'name': SearchDatastore_Task, 'duration_secs': 0.015573} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.188698] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.188698] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.189021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.189325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.189608] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.189994] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a25db33-6f55-45a6-a5b9-90587b53057c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.210328] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.210598] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.212035] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5492f0d8-2a91-4b86-848e-f1f24c065ca5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.219926] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 626.219926] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e0e89f-2c77-5da5-4afa-5c98187b5b68" [ 626.219926] env[62974]: _type = "Task" [ 626.219926] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.232570] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e0e89f-2c77-5da5-4afa-5c98187b5b68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.266633] env[62974]: DEBUG nova.compute.utils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 626.268617] env[62974]: DEBUG nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 626.268617] env[62974]: DEBUG nova.network.neutron [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 626.313623] env[62974]: DEBUG nova.network.neutron [-] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.356723] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14075c90-0641-4c49-ba83-76ca93cdf26a tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.233s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.363172] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653854, 'name': Rename_Task, 'duration_secs': 0.320218} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.363488] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.363790] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-026651a3-71bf-419d-811c-0ba664b960cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.371991] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 626.371991] env[62974]: value = "task-2653857" [ 626.371991] env[62974]: _type = "Task" [ 626.371991] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.382040] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653857, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.399289] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 626.399568] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 626.399842] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Deleting the datastore file [datastore1] 586a3541-060f-4859-8507-17faa637b17e {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 626.400147] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7c36fba-2a89-4b8c-9fe1-b642d40559d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.410121] env[62974]: DEBUG oslo_vmware.api [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for the task: (returnval){ [ 626.410121] env[62974]: value = "task-2653858" [ 626.410121] env[62974]: _type = "Task" [ 626.410121] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.425340] env[62974]: DEBUG oslo_vmware.api [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653858, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.427539] env[62974]: DEBUG nova.policy [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc5ab26774ed4f098ed9b2c733763b80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e40d31e6ca74d9c913e2ac2ae32f84c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 626.434459] env[62974]: DEBUG oslo_vmware.api [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2653844, 'name': PowerOnVM_Task, 'duration_secs': 1.0373} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.435348] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 626.435635] env[62974]: INFO nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Took 11.78 seconds to spawn the instance on the hypervisor. [ 626.435889] env[62974]: DEBUG nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 626.437127] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb3acb6-600b-47f0-9b7f-cd4f6da2a756 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.535492] env[62974]: DEBUG oslo_vmware.api [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Task: {'id': task-2653856, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.391114} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.538527] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 626.538871] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 626.539233] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 626.539532] env[62974]: INFO nova.compute.manager [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Took 1.15 seconds to destroy the instance on the hypervisor. [ 626.539901] env[62974]: DEBUG oslo.service.loopingcall [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 626.540534] env[62974]: DEBUG nova.compute.manager [-] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 626.540864] env[62974]: DEBUG nova.network.neutron [-] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.664533] env[62974]: DEBUG oslo_concurrency.lockutils [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] Releasing lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.664533] env[62974]: DEBUG nova.compute.manager [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Received event network-vif-deleted-a9d97dbe-61b9-4710-a3f6-ef2caed51d6b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 626.664533] env[62974]: DEBUG nova.compute.manager [req-241cdc53-868a-433a-8507-1d5c741299f8 req-3dbe87a4-0a6b-46bf-8869-37e4887b27a5 service nova] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Received event network-vif-deleted-e786c602-63b4-4d89-80a2-b141043584eb {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 626.734194] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e0e89f-2c77-5da5-4afa-5c98187b5b68, 'name': SearchDatastore_Task, 'duration_secs': 0.021094} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.739208] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a70de45-a3f1-40da-ac21-ea3ba2a88131 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.748092] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 626.748092] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522909fe-f28c-796c-6be9-0d01f8aa5d82" [ 626.748092] env[62974]: _type = "Task" [ 626.748092] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.758843] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522909fe-f28c-796c-6be9-0d01f8aa5d82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.775157] env[62974]: DEBUG nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 626.817486] env[62974]: INFO nova.compute.manager [-] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Took 1.36 seconds to deallocate network for instance. [ 626.865087] env[62974]: DEBUG nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 626.883180] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Updated VIF entry in instance network info cache for port 39515e98-a8f4-4af9-9948-b0a5d05d3188. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.883683] env[62974]: DEBUG nova.network.neutron [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Updating instance_info_cache with network_info: [{"id": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "address": "fa:16:3e:0f:52:51", "network": {"id": "a7b97036-f96d-45ad-817d-464bdde49ab0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1367360885-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df4c0c02aa7649ac8ded32754fa9613f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39515e98-a8", "ovs_interfaceid": "39515e98-a8f4-4af9-9948-b0a5d05d3188", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.893831] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653857, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.924532] env[62974]: DEBUG oslo_vmware.api [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653858, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.961696] env[62974]: INFO nova.compute.manager [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Took 46.78 seconds to build instance. [ 627.110819] env[62974]: DEBUG nova.network.neutron [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Successfully created port: aa8b790d-e5e2-42e7-bb13-826c844d11bc {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.243623] env[62974]: INFO nova.compute.manager [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Rebuilding instance [ 627.258987] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522909fe-f28c-796c-6be9-0d01f8aa5d82, 'name': SearchDatastore_Task, 'duration_secs': 0.022098} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.259298] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.259555] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 22a0a34a-c46b-4246-9a80-3540550bd793/22a0a34a-c46b-4246-9a80-3540550bd793.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.259823] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73af07ae-4cbf-4809-afae-3b358d854986 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.270477] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 627.270477] env[62974]: value = "task-2653859" [ 627.270477] env[62974]: _type = "Task" [ 627.270477] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.289152] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.307029] env[62974]: DEBUG nova.compute.manager [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.307029] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d737fc57-e4ad-486a-b5e2-137be31a4b55 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.329547] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.387644] env[62974]: DEBUG nova.compute.manager [req-e20f56c8-0418-4fa6-9b66-6adf0175ea35 req-0399b389-58b7-49df-a498-b5e8ce8d0d01 service nova] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Received event network-vif-deleted-b07f0ace-3474-4ef6-81c7-2959c86f0791 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 627.387644] env[62974]: DEBUG nova.compute.manager [req-e20f56c8-0418-4fa6-9b66-6adf0175ea35 req-0399b389-58b7-49df-a498-b5e8ce8d0d01 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Received event network-vif-deleted-ebd2d4e4-f1df-4022-a6b8-66224fadfb3d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 627.387644] env[62974]: INFO nova.compute.manager [req-e20f56c8-0418-4fa6-9b66-6adf0175ea35 req-0399b389-58b7-49df-a498-b5e8ce8d0d01 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Neutron deleted interface ebd2d4e4-f1df-4022-a6b8-66224fadfb3d; detaching it from the instance and deleting it from the info cache [ 627.387644] env[62974]: DEBUG nova.network.neutron [req-e20f56c8-0418-4fa6-9b66-6adf0175ea35 req-0399b389-58b7-49df-a498-b5e8ce8d0d01 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.393881] env[62974]: DEBUG oslo_concurrency.lockutils [req-aeb186f5-55d9-4ecd-ba45-16ff72f303a8 req-73579c43-fc34-44ed-9697-dd145c702472 service nova] Releasing lock "refresh_cache-05742180-08db-45db-9ee0-e359aa8af2f0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.394359] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653857, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.395604] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.412773] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59aef84-4c76-4f34-a526-ea40d258affa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.437030] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ed4895-8cea-4e4c-871f-982b223f0cd3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.442811] env[62974]: DEBUG oslo_vmware.api [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Task: {'id': task-2653858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.570371} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.444037] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 627.444037] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 627.444037] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 627.444037] env[62974]: INFO nova.compute.manager [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] [instance: 586a3541-060f-4859-8507-17faa637b17e] Took 2.47 seconds to destroy the instance on the hypervisor. [ 627.444407] env[62974]: DEBUG oslo.service.loopingcall [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 627.445232] env[62974]: DEBUG nova.compute.manager [-] [instance: 586a3541-060f-4859-8507-17faa637b17e] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 627.445381] env[62974]: DEBUG nova.network.neutron [-] [instance: 586a3541-060f-4859-8507-17faa637b17e] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.477579] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed81066f-1e24-4b55-900f-f0847adac3d5 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.666s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.479574] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b46bfe7-57d6-400f-a184-e74271c8f89f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.487844] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36da5b01-d454-4e2a-ab12-f1892ef5a50f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.502860] env[62974]: DEBUG nova.compute.provider_tree [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.608098] env[62974]: DEBUG nova.network.neutron [-] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.786764] env[62974]: DEBUG nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 627.789230] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653859, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.816543] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 627.816848] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 627.817067] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 627.817270] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 627.817415] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 627.817557] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 627.818027] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 627.818027] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 627.818141] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 627.818322] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 627.818433] env[62974]: DEBUG nova.virt.hardware [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 627.821167] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf60cfc-ef0e-499e-af23-7b9f5da74d97 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.834147] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbb99ea-c3fe-4b50-a6eb-ccce980f7260 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.886402] env[62974]: DEBUG oslo_vmware.api [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653857, 'name': PowerOnVM_Task, 'duration_secs': 1.048007} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.886688] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.886936] env[62974]: INFO nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Took 8.16 seconds to spawn the instance on the hypervisor. 
[ 627.887181] env[62974]: DEBUG nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.889566] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02c287f-e5ab-4e6d-b69a-aa0557c7ae52 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.896520] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-308cba8d-9fdd-45e6-bb40-c0cbf32e19ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.914127] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c011d46f-0408-4ca0-b4fe-5669b8025cd7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.961723] env[62974]: DEBUG nova.compute.manager [req-e20f56c8-0418-4fa6-9b66-6adf0175ea35 req-0399b389-58b7-49df-a498-b5e8ce8d0d01 service nova] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Detach interface failed, port_id=ebd2d4e4-f1df-4022-a6b8-66224fadfb3d, reason: Instance 7f0d367d-9d60-414b-990e-56a2b43fd963 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 627.983440] env[62974]: DEBUG nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 628.011497] env[62974]: DEBUG nova.scheduler.client.report [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 628.111851] env[62974]: INFO nova.compute.manager [-] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Took 1.57 seconds to deallocate network for instance. [ 628.284127] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663817} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.284375] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 22a0a34a-c46b-4246-9a80-3540550bd793/22a0a34a-c46b-4246-9a80-3540550bd793.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.284826] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.284877] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9aded0e9-a8d0-4041-b7f3-ca8adcbadcab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.295733] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 628.295733] env[62974]: value = "task-2653860" [ 628.295733] env[62974]: _type = "Task" [ 628.295733] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.306238] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653860, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.319575] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "8621428e-cf42-47a4-82c8-a003c377b257" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.319817] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.326083] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 628.326743] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cef3903b-34ca-40b3-ba57-40af11cd6294 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.333776] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 628.333776] env[62974]: value = "task-2653861" [ 628.333776] env[62974]: _type = "Task" [ 628.333776] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.344344] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.393968] env[62974]: DEBUG nova.network.neutron [-] [instance: 586a3541-060f-4859-8507-17faa637b17e] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.412652] env[62974]: INFO nova.compute.manager [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Took 43.89 seconds to build instance. 
[ 628.502917] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.515907] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.762s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.516442] env[62974]: DEBUG nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 628.520852] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.923s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.520852] env[62974]: INFO nova.compute.claims [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.619707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.685135] env[62974]: DEBUG nova.network.neutron [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Successfully updated port: aa8b790d-e5e2-42e7-bb13-826c844d11bc {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 628.757672] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "da43a464-ebae-4038-9f7b-330df22d8d7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.757916] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.806200] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088242} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.806511] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.807319] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d71b30-65d8-4470-8ac9-3f5d20dcd3d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.829202] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 22a0a34a-c46b-4246-9a80-3540550bd793/22a0a34a-c46b-4246-9a80-3540550bd793.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.830094] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d954800e-1850-4a49-976d-deb9068816f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.852659] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653861, 'name': PowerOffVM_Task, 'duration_secs': 0.131196} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.853865] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 628.854155] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 628.854974] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 628.854974] env[62974]: value = "task-2653862" [ 628.854974] env[62974]: _type = "Task" [ 628.854974] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.855254] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2f1187-14b7-4d96-9e8b-072bd9fff4c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.865255] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653862, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.867272] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 628.867519] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d750ae32-b969-4570-af4d-9340a8c6cda8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.893258] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 628.893510] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 628.893637] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Deleting the datastore file [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 628.894287] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f654b692-fc6b-49fe-8f2f-2b3c681f726e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.896652] env[62974]: INFO nova.compute.manager [-] [instance: 586a3541-060f-4859-8507-17faa637b17e] Took 1.45 seconds to deallocate network for instance. [ 628.910939] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 628.910939] env[62974]: value = "task-2653864" [ 628.910939] env[62974]: _type = "Task" [ 628.910939] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.914285] env[62974]: DEBUG oslo_concurrency.lockutils [None req-12d558b3-52f6-41f4-9371-9db4d12e713d tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.013s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.926021] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.025703] env[62974]: DEBUG nova.compute.utils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 629.028815] env[62974]: DEBUG nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 629.028982] env[62974]: DEBUG nova.network.neutron [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 629.075392] env[62974]: DEBUG nova.policy [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42e27e7ee7d74b54bef0100fb07b64f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9a9caff8b384acdb8294a5efac6df25', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 629.191583] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "refresh_cache-30fcd64c-4570-454b-a7e5-3246c92d90fc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.191583] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired lock "refresh_cache-30fcd64c-4570-454b-a7e5-3246c92d90fc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.191583] env[62974]: DEBUG 
nova.network.neutron [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.197189] env[62974]: DEBUG nova.compute.manager [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 629.200134] env[62974]: DEBUG nova.compute.manager [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing instance network info cache due to event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 629.200134] env[62974]: DEBUG oslo_concurrency.lockutils [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] Acquiring lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.200134] env[62974]: DEBUG oslo_concurrency.lockutils [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] Acquired lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.200134] env[62974]: DEBUG nova.network.neutron [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 629.369598] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653862, 'name': ReconfigVM_Task, 'duration_secs': 0.2775} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.369870] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 22a0a34a-c46b-4246-9a80-3540550bd793/22a0a34a-c46b-4246-9a80-3540550bd793.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.371091] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f74417f8-91ca-4eb9-8f04-99e499f803d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.377552] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 629.377552] env[62974]: value = "task-2653865" [ 629.377552] env[62974]: _type = "Task" [ 629.377552] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.378329] env[62974]: DEBUG nova.network.neutron [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Successfully created port: 0b68ad10-900f-4830-8982-2ad39bf5724d {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.389932] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653865, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.403611] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.416156] env[62974]: DEBUG nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 629.421817] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184825} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.422315] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 629.422501] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 629.422674] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 629.511568] env[62974]: DEBUG nova.compute.manager [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 586a3541-060f-4859-8507-17faa637b17e] Received event network-vif-deleted-b8fd7c55-6daa-4314-8b00-89aea7879581 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 629.511833] env[62974]: DEBUG nova.compute.manager [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Received event network-vif-plugged-aa8b790d-e5e2-42e7-bb13-826c844d11bc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 629.512270] env[62974]: DEBUG oslo_concurrency.lockutils [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] Acquiring lock "30fcd64c-4570-454b-a7e5-3246c92d90fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.512639] env[62974]: DEBUG oslo_concurrency.lockutils [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.512831] env[62974]: DEBUG oslo_concurrency.lockutils [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.512944] env[62974]: DEBUG nova.compute.manager [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] No waiting events found dispatching network-vif-plugged-aa8b790d-e5e2-42e7-bb13-826c844d11bc {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 629.513126] env[62974]: WARNING nova.compute.manager [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b 
req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Received unexpected event network-vif-plugged-aa8b790d-e5e2-42e7-bb13-826c844d11bc for instance with vm_state building and task_state spawning. [ 629.513287] env[62974]: DEBUG nova.compute.manager [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Received event network-changed-aa8b790d-e5e2-42e7-bb13-826c844d11bc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 629.513434] env[62974]: DEBUG nova.compute.manager [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Refreshing instance network info cache due to event network-changed-aa8b790d-e5e2-42e7-bb13-826c844d11bc. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 629.513598] env[62974]: DEBUG oslo_concurrency.lockutils [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] Acquiring lock "refresh_cache-30fcd64c-4570-454b-a7e5-3246c92d90fc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.529583] env[62974]: DEBUG nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 629.754374] env[62974]: DEBUG nova.network.neutron [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.894559] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653865, 'name': Rename_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.946074] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.999743] env[62974]: DEBUG nova.network.neutron [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Updating instance_info_cache with network_info: [{"id": "aa8b790d-e5e2-42e7-bb13-826c844d11bc", "address": "fa:16:3e:17:d5:cc", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa8b790d-e5", "ovs_interfaceid": "aa8b790d-e5e2-42e7-bb13-826c844d11bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.043471] env[62974]: DEBUG nova.network.neutron [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updated VIF entry in instance network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.043804] env[62974]: DEBUG nova.network.neutron [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [{"id": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "address": "fa:16:3e:cb:fb:0a", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dbab348-e4", "ovs_interfaceid": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.101405] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea0e331-393e-46c2-a7b3-df6f0f649404 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.109389] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6a0b57-99aa-461a-baf9-e6edcd71056c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.141791] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9a3434-0c6e-4c8d-b5db-27873c5a671c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.149298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de750c02-68bd-4fbe-b360-6ee970de0ea1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.163033] env[62974]: DEBUG nova.compute.provider_tree [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.391568] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653865, 'name': Rename_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.465172] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 630.465430] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 630.465587] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 630.465762] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 630.465907] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 630.466067] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 630.466279] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 630.466437] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 630.466598] env[62974]: DEBUG nova.virt.hardware [None 
req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 630.466756] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 630.466994] env[62974]: DEBUG nova.virt.hardware [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 630.467863] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d47955a-9be6-4e4e-8e12-823d0b0c3ec2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.478776] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e252c3-1a0d-4710-9319-0bbed5669829 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.499293] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 630.509532] env[62974]: DEBUG oslo.service.loopingcall [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.510684] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Releasing lock "refresh_cache-30fcd64c-4570-454b-a7e5-3246c92d90fc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.510684] env[62974]: DEBUG nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Instance network_info: |[{"id": "aa8b790d-e5e2-42e7-bb13-826c844d11bc", "address": "fa:16:3e:17:d5:cc", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa8b790d-e5", "ovs_interfaceid": "aa8b790d-e5e2-42e7-bb13-826c844d11bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 630.510934] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 630.510934] env[62974]: DEBUG oslo_concurrency.lockutils [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] Acquired lock "refresh_cache-30fcd64c-4570-454b-a7e5-3246c92d90fc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.511177] env[62974]: DEBUG nova.network.neutron [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Refreshing network info cache for port aa8b790d-e5e2-42e7-bb13-826c844d11bc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.516224] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:d5:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa8b790d-e5e2-42e7-bb13-826c844d11bc', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 630.528935] env[62974]: DEBUG oslo.service.loopingcall [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.529257] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce434ee7-7199-4f67-99f9-dd42d9ab83e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.555600] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 630.557399] env[62974]: DEBUG nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 630.560134] env[62974]: DEBUG oslo_concurrency.lockutils [req-8881a7c5-ca2f-4b31-89ea-d5ed9c105dc3 req-d4357193-e8e4-4cfc-ac96-09fa39e75369 service nova] Releasing lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.561528] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8b209f2-34d0-4653-9f53-172cebc26180 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.592907] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.592907] env[62974]: value = "task-2653867" [ 630.592907] env[62974]: _type = "Task" [ 630.592907] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.600276] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 630.600516] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 630.600671] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 630.600849] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 630.600991] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 630.601147] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 630.601350] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 630.601507] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 630.601667] env[62974]: DEBUG 
nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 630.601827] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 630.601996] env[62974]: DEBUG nova.virt.hardware [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 630.602711] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.602711] env[62974]: value = "task-2653868" [ 630.602711] env[62974]: _type = "Task" [ 630.602711] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.603408] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2cfbd4-745b-4d5c-8e32-aec6e773b86c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.609309] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653867, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.617421] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653868, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.620276] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc591455-5544-44e6-bf29-1e0728a3248d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.673016] env[62974]: DEBUG nova.scheduler.client.report [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 630.892973] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653865, 'name': Rename_Task, 'duration_secs': 1.144158} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.893306] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 630.893575] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de60ce46-457b-4e54-bc2c-c333bfcd7887 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.900913] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 630.900913] env[62974]: value = "task-2653869" [ 630.900913] env[62974]: _type = "Task" [ 630.900913] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.909942] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653869, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.916163] env[62974]: DEBUG nova.network.neutron [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Successfully updated port: 0b68ad10-900f-4830-8982-2ad39bf5724d {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 631.103391] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653867, 'name': CreateVM_Task, 'duration_secs': 0.307634} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.103562] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 631.103979] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.104292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.104612] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 631.104873] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94defba7-fc5b-4ef0-b123-aba2cd9abf7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.109779] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 631.109779] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520629e6-457e-fcc3-1568-d2bf134534df" [ 631.109779] env[62974]: _type = "Task" [ 631.109779] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.121574] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653868, 'name': CreateVM_Task, 'duration_secs': 0.412628} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.125823] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 631.125823] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520629e6-457e-fcc3-1568-d2bf134534df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.125823] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.177638] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.179088] env[62974]: DEBUG nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 631.181634] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.291s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.181854] env[62974]: DEBUG nova.objects.instance [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lazy-loading 'resources' on Instance uuid 001557f9-ea50-4e86-9eeb-dd4436791453 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 631.276199] env[62974]: DEBUG nova.network.neutron [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Updated VIF entry in instance network info cache for port aa8b790d-e5e2-42e7-bb13-826c844d11bc. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 631.276592] env[62974]: DEBUG nova.network.neutron [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Updating instance_info_cache with network_info: [{"id": "aa8b790d-e5e2-42e7-bb13-826c844d11bc", "address": "fa:16:3e:17:d5:cc", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa8b790d-e5", "ovs_interfaceid": "aa8b790d-e5e2-42e7-bb13-826c844d11bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.410566] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653869, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.418759] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "refresh_cache-a7a014b9-10e1-45a0-85da-4754051e8d82" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.418900] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquired lock "refresh_cache-a7a014b9-10e1-45a0-85da-4754051e8d82" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.419055] env[62974]: DEBUG nova.network.neutron [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 631.623562] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520629e6-457e-fcc3-1568-d2bf134534df, 'name': SearchDatastore_Task, 'duration_secs': 0.016026} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.623831] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.624080] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 631.624303] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.624444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.624620] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 631.624963] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.625214] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 631.625439] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-851bcaee-c0f7-495a-a2f1-20c468c3c92c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.628748] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3814ac2-f423-4511-b20f-88cdc389e6c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.634197] env[62974]: DEBUG oslo_vmware.api [None 
req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 631.634197] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5247ed16-b3e8-8409-de88-d5df27dd3eac" [ 631.634197] env[62974]: _type = "Task" [ 631.634197] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.637883] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 631.638120] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 631.639109] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c498e76-2ade-4021-84bb-8cb6e1f53ceb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.644561] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5247ed16-b3e8-8409-de88-d5df27dd3eac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.648153] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 631.648153] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527f32a1-831a-4025-a652-2283defa59ef" [ 631.648153] env[62974]: _type = "Task" [ 631.648153] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.659194] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527f32a1-831a-4025-a652-2283defa59ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.667170] env[62974]: DEBUG nova.compute.manager [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 631.667293] env[62974]: DEBUG nova.compute.manager [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing instance network info cache due to event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 631.667503] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] Acquiring lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.667644] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] Acquired lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.667800] env[62974]: DEBUG nova.network.neutron [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 631.685043] env[62974]: DEBUG nova.compute.utils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.691057] env[62974]: DEBUG nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 631.691057] env[62974]: DEBUG nova.network.neutron [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 631.727753] env[62974]: DEBUG nova.policy [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a86bbc98ec50467792b3c6a6cedc790b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14dd4a9a77ad40458d40bb82ac4b90a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 631.780354] env[62974]: DEBUG oslo_concurrency.lockutils [req-f0243965-31ea-4d48-a1d6-e39fcba16d7b req-c19df6bb-e6a1-415c-a17b-c2e79325d58d service nova] Releasing lock "refresh_cache-30fcd64c-4570-454b-a7e5-3246c92d90fc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.805096] env[62974]: DEBUG nova.compute.manager [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Received event network-vif-plugged-0b68ad10-900f-4830-8982-2ad39bf5724d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} 
[ 631.805314] env[62974]: DEBUG oslo_concurrency.lockutils [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] Acquiring lock "a7a014b9-10e1-45a0-85da-4754051e8d82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.805342] env[62974]: DEBUG oslo_concurrency.lockutils [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.805494] env[62974]: DEBUG oslo_concurrency.lockutils [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.805655] env[62974]: DEBUG nova.compute.manager [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] No waiting events found dispatching network-vif-plugged-0b68ad10-900f-4830-8982-2ad39bf5724d {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 631.805847] env[62974]: WARNING nova.compute.manager [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Received unexpected event network-vif-plugged-0b68ad10-900f-4830-8982-2ad39bf5724d for instance with vm_state building and task_state spawning. [ 631.805971] env[62974]: DEBUG nova.compute.manager [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Received event network-changed-0b68ad10-900f-4830-8982-2ad39bf5724d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 631.806130] env[62974]: DEBUG nova.compute.manager [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Refreshing instance network info cache due to event network-changed-0b68ad10-900f-4830-8982-2ad39bf5724d. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 631.806287] env[62974]: DEBUG oslo_concurrency.lockutils [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] Acquiring lock "refresh_cache-a7a014b9-10e1-45a0-85da-4754051e8d82" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.916133] env[62974]: DEBUG oslo_vmware.api [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653869, 'name': PowerOnVM_Task, 'duration_secs': 0.532011} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.918645] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 631.919087] env[62974]: INFO nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Took 9.50 seconds to spawn the instance on the hypervisor. [ 631.919404] env[62974]: DEBUG nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 631.920836] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4959d52-cf61-4854-98ab-ec9618f4c47f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.981569] env[62974]: DEBUG nova.network.neutron [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.096006] env[62974]: DEBUG nova.network.neutron [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Successfully created port: 36d99cd3-daa8-4da2-b43f-85af2aaa66db {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.144989] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5247ed16-b3e8-8409-de88-d5df27dd3eac, 'name': SearchDatastore_Task, 'duration_secs': 0.010712} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.147513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.147659] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.147875] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.158302] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527f32a1-831a-4025-a652-2283defa59ef, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.161312] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39fb675a-1aab-4879-9afb-f73e94eb559c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.173524] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 632.173524] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526e86dd-cefb-7c55-5b8b-69b2944ed397" [ 632.173524] env[62974]: _type = "Task" [ 632.173524] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.186056] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526e86dd-cefb-7c55-5b8b-69b2944ed397, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.189704] env[62974]: DEBUG nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 632.197265] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ecfcfe-7585-49dd-bb3a-dc88cc8a7b43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.204390] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d516e3-4b23-47bb-849a-dea9b2a16769 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.240656] env[62974]: DEBUG nova.network.neutron [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Updating instance_info_cache with network_info: [{"id": "0b68ad10-900f-4830-8982-2ad39bf5724d", "address": "fa:16:3e:86:b0:df", "network": {"id": "c5e11726-b0f4-41a0-a30a-78b46f9826e3", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-918475303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9a9caff8b384acdb8294a5efac6df25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b68ad10-90", "ovs_interfaceid": "0b68ad10-900f-4830-8982-2ad39bf5724d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.242709] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a308e8e2-90b7-4cb6-94ac-d1249fbeea88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.253041] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa4a90e-e805-431e-93d6-db499ce554d4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.269055] env[62974]: DEBUG nova.compute.provider_tree [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.448543] env[62974]: INFO nova.compute.manager [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Took 41.90 seconds to build instance. 
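Most of the VMware-side records in this section (PowerOnVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, and the repeated "Waiting for the task" / "progress is 0%" / "completed successfully" lines from api.py:397/434/444) come from oslo.vmware's asynchronous-task polling. A minimal sketch of that invoke-and-poll pattern, assuming an already-created oslo_vmware.api.VMwareAPISession and a VM managed-object reference obtained elsewhere, looks like this; it is an illustration of the library pattern, not the driver's actual code.

```python
# Minimal sketch of the oslo.vmware invoke/poll pattern behind the
# "Waiting for the task ... progress is N% ... completed successfully"
# records above. `session` is assumed to be an existing
# oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine moref;
# neither is constructed here.

def power_on(session, vm_ref):
    # Issue the asynchronous vSphere call; this returns a Task
    # managed-object reference immediately.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task() polls the task server-side state (the _poll_task
    # debug lines in the log) and returns its TaskInfo once it succeeds,
    # raising an oslo.vmware exception if the task fails.
    return session.wait_for_task(task)
```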
[ 632.689807] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526e86dd-cefb-7c55-5b8b-69b2944ed397, 'name': SearchDatastore_Task, 'duration_secs': 0.027626} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.690328] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.690682] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 632.691122] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.691480] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 632.691812] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a3dc271-acd1-470a-a82f-bd856a78245d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.693914] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-532ebaa9-8fc8-4f03-9047-6f9360ba492c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.697664] env[62974]: DEBUG nova.network.neutron [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updated VIF entry in instance network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 632.698107] env[62974]: DEBUG nova.network.neutron [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [{"id": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "address": "fa:16:3e:cb:fb:0a", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dbab348-e4", "ovs_interfaceid": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.705195] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 632.705195] env[62974]: value = "task-2653871" [ 632.705195] env[62974]: _type = "Task" [ 632.705195] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.707124] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 632.708333] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 632.717447] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af82006b-160b-4a40-b282-4e8defaa25fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.722145] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653871, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.724366] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 632.724366] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52850749-4906-821d-1887-cd8fd170c143" [ 632.724366] env[62974]: _type = "Task" [ 632.724366] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.736427] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52850749-4906-821d-1887-cd8fd170c143, 'name': SearchDatastore_Task, 'duration_secs': 0.008971} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.736427] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e28f4aeb-bb06-4c21-94c6-872368e963b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.744017] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 632.744017] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5272480c-9b1a-2694-b682-c499510c5f51" [ 632.744017] env[62974]: _type = "Task" [ 632.744017] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.747407] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Releasing lock "refresh_cache-a7a014b9-10e1-45a0-85da-4754051e8d82" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.747962] env[62974]: DEBUG nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Instance network_info: |[{"id": "0b68ad10-900f-4830-8982-2ad39bf5724d", "address": "fa:16:3e:86:b0:df", "network": {"id": "c5e11726-b0f4-41a0-a30a-78b46f9826e3", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-918475303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9a9caff8b384acdb8294a5efac6df25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b68ad10-90", "ovs_interfaceid": "0b68ad10-900f-4830-8982-2ad39bf5724d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 632.752700] env[62974]: DEBUG oslo_concurrency.lockutils [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] Acquired lock "refresh_cache-a7a014b9-10e1-45a0-85da-4754051e8d82" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.753024] env[62974]: DEBUG nova.network.neutron [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Refreshing network info cache for port 0b68ad10-900f-4830-8982-2ad39bf5724d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 632.754530] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:b0:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15ff34f9-4b02-4be1-b433-3ec4bd1b37c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b68ad10-900f-4830-8982-2ad39bf5724d', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 632.763975] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 
tempest-ImagesNegativeTestJSON-2116110957-project-member] Creating folder: Project (a9a9caff8b384acdb8294a5efac6df25). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 632.763975] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5272480c-9b1a-2694-b682-c499510c5f51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.764321] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f79666a3-f773-4be6-8d61-1b6f0a1da2a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.772895] env[62974]: DEBUG nova.scheduler.client.report [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.778114] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Created folder: Project (a9a9caff8b384acdb8294a5efac6df25) in parent group-v535199. [ 632.778223] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Creating folder: Instances. Parent ref: group-v535280. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 632.778667] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29ec8e26-6138-4525-8a67-e6a233f77c7f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.788936] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Created folder: Instances in parent group-v535280. [ 632.788936] env[62974]: DEBUG oslo.service.loopingcall [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 632.788936] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 632.789414] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0d50e00-576f-4118-a7d8-206cffe084e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.808011] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 632.808011] env[62974]: value = "task-2653874" [ 632.808011] env[62974]: _type = "Task" [ 632.808011] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.817410] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653874, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.952479] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df4b8e4a-fe5c-45c2-83a8-a1c50fd16087 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "22a0a34a-c46b-4246-9a80-3540550bd793" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.120s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.207324] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] Releasing lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.207655] env[62974]: DEBUG nova.compute.manager [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Received event network-changed-e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 633.207838] env[62974]: DEBUG nova.compute.manager [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Refreshing instance network info cache due to event network-changed-e4309fab-6f6a-4cb4-8401-082b264bf2b9. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 633.208098] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] Acquiring lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.208249] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] Acquired lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.208412] env[62974]: DEBUG nova.network.neutron [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Refreshing network info cache for port e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 633.210541] env[62974]: DEBUG nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 633.223300] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653871, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.249757] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 633.250015] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.250238] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 633.250364] env[62974]: DEBUG 
nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.250505] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 633.250646] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 633.250851] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 633.251435] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 633.251435] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 633.251435] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 633.251575] env[62974]: DEBUG nova.virt.hardware [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 633.252648] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b287a039-587c-4258-8b6c-1f4a54c476fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.262255] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5272480c-9b1a-2694-b682-c499510c5f51, 'name': SearchDatastore_Task, 'duration_secs': 0.009288} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.263407] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd1a66d-df0a-412e-ab78-89cd73d8f235 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.267141] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.267391] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 30fcd64c-4570-454b-a7e5-3246c92d90fc/30fcd64c-4570-454b-a7e5-3246c92d90fc.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 633.269529] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39828508-3153-4e88-945e-82191d0a4ef7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.281610] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.100s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.285728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.395s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.286578] env[62974]: INFO nova.compute.claims [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.289538] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 633.289538] env[62974]: value = "task-2653875" [ 633.289538] env[62974]: _type = "Task" [ 633.289538] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.299606] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653875, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.307492] env[62974]: INFO nova.scheduler.client.report [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Deleted allocations for instance 001557f9-ea50-4e86-9eeb-dd4436791453 [ 633.318354] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653874, 'name': CreateVM_Task, 'duration_secs': 0.438936} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.318518] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 633.319220] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.319380] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.319686] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 633.319930] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f20da8c5-5a6e-4a4b-930a-9741b104dbf6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.326367] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 633.326367] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c841b-16ba-1942-50f3-3a1781b89aa7" [ 633.326367] env[62974]: _type = "Task" [ 633.326367] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.333183] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c841b-16ba-1942-50f3-3a1781b89aa7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.457618] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 633.468137] env[62974]: DEBUG nova.network.neutron [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Updated VIF entry in instance network info cache for port 0b68ad10-900f-4830-8982-2ad39bf5724d. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 633.468137] env[62974]: DEBUG nova.network.neutron [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Updating instance_info_cache with network_info: [{"id": "0b68ad10-900f-4830-8982-2ad39bf5724d", "address": "fa:16:3e:86:b0:df", "network": {"id": "c5e11726-b0f4-41a0-a30a-78b46f9826e3", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-918475303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9a9caff8b384acdb8294a5efac6df25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b68ad10-90", "ovs_interfaceid": "0b68ad10-900f-4830-8982-2ad39bf5724d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.491019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.491019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.491019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.491019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.491499] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.494594] env[62974]: INFO nova.compute.manager [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Terminating instance [ 633.668321] env[62974]: DEBUG nova.compute.manager [None req-db97563c-4ac9-433f-b7e7-9b58acb5739c tempest-ServerExternalEventsTest-1921310671 tempest-ServerExternalEventsTest-1921310671-project] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Received event network-changed {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 633.668321] env[62974]: DEBUG nova.compute.manager [None req-db97563c-4ac9-433f-b7e7-9b58acb5739c tempest-ServerExternalEventsTest-1921310671 tempest-ServerExternalEventsTest-1921310671-project] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Refreshing instance network info cache due to event network-changed. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 633.668472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-db97563c-4ac9-433f-b7e7-9b58acb5739c tempest-ServerExternalEventsTest-1921310671 tempest-ServerExternalEventsTest-1921310671-project] Acquiring lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.668563] env[62974]: DEBUG oslo_concurrency.lockutils [None req-db97563c-4ac9-433f-b7e7-9b58acb5739c tempest-ServerExternalEventsTest-1921310671 tempest-ServerExternalEventsTest-1921310671-project] Acquired lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.668670] env[62974]: DEBUG nova.network.neutron [None req-db97563c-4ac9-433f-b7e7-9b58acb5739c tempest-ServerExternalEventsTest-1921310671 tempest-ServerExternalEventsTest-1921310671-project] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 633.725443] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653871, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534229} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.725822] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 633.726102] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 633.726419] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-037cd260-fcbb-42cc-8d30-0e8556017ae4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.735720] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 633.735720] env[62974]: value = "task-2653877" [ 633.735720] env[62974]: _type = "Task" [ 633.735720] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.744678] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653877, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.806189] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653875, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495443} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.806629] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 30fcd64c-4570-454b-a7e5-3246c92d90fc/30fcd64c-4570-454b-a7e5-3246c92d90fc.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 633.807025] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 633.808556] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f07cf3cc-31d9-4323-8390-49f37338583a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.821735] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 633.821735] env[62974]: value = "task-2653878" [ 633.821735] env[62974]: _type = "Task" [ 633.821735] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.822662] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17523433-4c8e-41b4-a5bf-a061650c80ee tempest-FloatingIPsAssociationNegativeTestJSON-68213292 tempest-FloatingIPsAssociationNegativeTestJSON-68213292-project-member] Lock "001557f9-ea50-4e86-9eeb-dd4436791453" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.922s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.836704] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.841758] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c841b-16ba-1942-50f3-3a1781b89aa7, 'name': SearchDatastore_Task, 'duration_secs': 0.009341} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.842118] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.842421] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 633.842703] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.842908] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.843148] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 633.843672] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d528346a-98e7-4f6a-afef-ee852365fb14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.856278] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 633.856278] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 633.856893] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c92fe25-fbfc-42bc-8013-fc59b7ea0696 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.865986] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 633.865986] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d29f0-569b-41f7-1759-a5d8df8c390c" [ 633.865986] env[62974]: _type = "Task" [ 633.865986] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.876479] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d29f0-569b-41f7-1759-a5d8df8c390c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.967831] env[62974]: DEBUG oslo_concurrency.lockutils [req-99d29697-27aa-422c-887e-d10112b3270a req-3a2287a0-3eea-44dc-b88c-5b1271984d7a service nova] Releasing lock "refresh_cache-a7a014b9-10e1-45a0-85da-4754051e8d82" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.985944] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.999683] env[62974]: DEBUG nova.compute.manager [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 633.999916] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 634.000962] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d6da92-4220-4247-8bfe-dbeef87e1610 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.007996] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 634.008300] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19a849f8-c0e9-4cf3-814b-aeb0651a748a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.014466] env[62974]: DEBUG oslo_vmware.api [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 634.014466] env[62974]: value = "task-2653879" [ 634.014466] env[62974]: _type = "Task" [ 634.014466] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.024127] env[62974]: DEBUG oslo_vmware.api [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.045826] env[62974]: DEBUG nova.network.neutron [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updated VIF entry in instance network info cache for port e4309fab-6f6a-4cb4-8401-082b264bf2b9. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 634.046450] env[62974]: DEBUG nova.network.neutron [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updating instance_info_cache with network_info: [{"id": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "address": "fa:16:3e:be:27:61", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4309fab-6f", "ovs_interfaceid": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.095845] env[62974]: DEBUG nova.compute.manager [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Received event network-changed-e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 634.096780] env[62974]: DEBUG nova.compute.manager [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Refreshing instance network info cache due to event network-changed-e4309fab-6f6a-4cb4-8401-082b264bf2b9. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 634.096780] env[62974]: DEBUG oslo_concurrency.lockutils [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] Acquiring lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.130615] env[62974]: DEBUG nova.network.neutron [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Successfully updated port: 36d99cd3-daa8-4da2-b43f-85af2aaa66db {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 634.245531] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144105} completed successfully. 
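The instance_info_cache updates above serialize each VIF as a nested dict (port ID, MAC, network, subnets, binding details). A small plain-Python sketch of pulling the usual troubleshooting fields out of one such entry, using a trimmed copy of the VIF logged above (keys not needed here are omitted):

    # Trimmed copy of the VIF entry logged above; most keys omitted for brevity.
    vif = {
        "id": "e4309fab-6f6a-4cb4-8401-082b264bf2b9",
        "address": "fa:16:3e:be:27:61",
        "type": "ovs",
        "devname": "tape4309fab-6f",
        "network": {
            "id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1", "version": 4},
                "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4}],
            }],
        },
        "details": {"connectivity": "l2", "segmentation_id": 999},
    }

    # Port ID, MAC, bridge and fixed IPs are usually what matters when reading these dumps.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["network"]["bridge"], fixed_ips)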
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.245867] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 634.246828] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e373c8-8e27-46c9-a47b-fa53f03f53bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.268285] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 634.268668] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe8f3f6e-cdc9-4940-97fb-8b73a1bbbb4a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.291083] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 634.291083] env[62974]: value = "task-2653880" [ 634.291083] env[62974]: _type = "Task" [ 634.291083] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.298547] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653880, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.338995] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071639} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.338995] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 634.339789] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b40e80e-1b41-4c37-9cb0-61b383e4b79c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.367561] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 30fcd64c-4570-454b-a7e5-3246c92d90fc/30fcd64c-4570-454b-a7e5-3246c92d90fc.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 634.370630] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-062486bb-db2f-4caf-a662-fb7018352046 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.397278] env[62974]: DEBUG nova.compute.manager [req-861a7ea2-43f1-4497-8633-0663a8d9625d req-9ea0ebf1-355c-46e4-9f15-56fbdc98c829 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Received event network-vif-plugged-36d99cd3-daa8-4da2-b43f-85af2aaa66db {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 634.397278] env[62974]: DEBUG oslo_concurrency.lockutils [req-861a7ea2-43f1-4497-8633-0663a8d9625d req-9ea0ebf1-355c-46e4-9f15-56fbdc98c829 service nova] Acquiring lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.397278] env[62974]: DEBUG oslo_concurrency.lockutils [req-861a7ea2-43f1-4497-8633-0663a8d9625d req-9ea0ebf1-355c-46e4-9f15-56fbdc98c829 service nova] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.397278] env[62974]: DEBUG oslo_concurrency.lockutils [req-861a7ea2-43f1-4497-8633-0663a8d9625d req-9ea0ebf1-355c-46e4-9f15-56fbdc98c829 service nova] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.397455] env[62974]: DEBUG nova.compute.manager [req-861a7ea2-43f1-4497-8633-0663a8d9625d req-9ea0ebf1-355c-46e4-9f15-56fbdc98c829 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] No waiting events found dispatching network-vif-plugged-36d99cd3-daa8-4da2-b43f-85af2aaa66db {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 634.398500] env[62974]: WARNING nova.compute.manager 
[req-861a7ea2-43f1-4497-8633-0663a8d9625d req-9ea0ebf1-355c-46e4-9f15-56fbdc98c829 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Received unexpected event network-vif-plugged-36d99cd3-daa8-4da2-b43f-85af2aaa66db for instance with vm_state building and task_state spawning. [ 634.407499] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d29f0-569b-41f7-1759-a5d8df8c390c, 'name': SearchDatastore_Task, 'duration_secs': 0.011616} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.409174] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 634.409174] env[62974]: value = "task-2653881" [ 634.409174] env[62974]: _type = "Task" [ 634.409174] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.409669] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0c73aab-1e7a-4091-b00a-7f5628763c43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.420072] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 634.420072] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527bea57-ec41-1dbe-45c7-b80ec254f89f" [ 634.420072] env[62974]: _type = "Task" [ 634.420072] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.434285] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527bea57-ec41-1dbe-45c7-b80ec254f89f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.443285] env[62974]: DEBUG nova.network.neutron [None req-db97563c-4ac9-433f-b7e7-9b58acb5739c tempest-ServerExternalEventsTest-1921310671 tempest-ServerExternalEventsTest-1921310671-project] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Updating instance_info_cache with network_info: [{"id": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "address": "fa:16:3e:e5:8b:fe", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22854a7f-ed", "ovs_interfaceid": "22854a7f-ed93-414f-9a4b-b5b486459cc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.529407] env[62974]: DEBUG oslo_vmware.api [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653879, 'name': PowerOffVM_Task, 'duration_secs': 0.216371} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.529685] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 634.529834] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 634.530291] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca9975bd-4abb-42b7-8fe9-cd2c26dfcf27 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.555564] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb611dd6-2b7d-4800-b30a-296540dc225a req-e27db059-fdbc-48a1-b909-9dd253d0678e service nova] Releasing lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.555983] env[62974]: DEBUG oslo_concurrency.lockutils [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] Acquired lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.556218] env[62974]: DEBUG nova.network.neutron [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Refreshing network info cache for port e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.608022] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 634.608022] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 634.608022] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Deleting the datastore file [datastore2] b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 634.608022] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0248c18d-20c8-4e69-adef-1e7cc85c5ec7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
634.611537] env[62974]: DEBUG oslo_vmware.api [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 634.611537] env[62974]: value = "task-2653883" [ 634.611537] env[62974]: _type = "Task" [ 634.611537] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.623022] env[62974]: DEBUG oslo_vmware.api [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653883, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.634460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.634605] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.634748] env[62974]: DEBUG nova.network.neutron [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.665157] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "22a0a34a-c46b-4246-9a80-3540550bd793" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.665379] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "22a0a34a-c46b-4246-9a80-3540550bd793" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.665576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "22a0a34a-c46b-4246-9a80-3540550bd793-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.665762] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 
tempest-ServerExternalEventsTest-1969222377-project-member] Lock "22a0a34a-c46b-4246-9a80-3540550bd793-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.665916] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "22a0a34a-c46b-4246-9a80-3540550bd793-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.670279] env[62974]: INFO nova.compute.manager [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Terminating instance [ 634.801140] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.861357] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fc82c0-1551-48ac-a3e2-66de99d0f3d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.869314] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef441e13-9862-4447-9c2a-62a990b8dfea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.903046] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0da7530-3a30-4850-8a03-ce3498850d4c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.910477] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242f28e3-c8d4-457f-9092-4a0b277f8a21 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.930714] env[62974]: DEBUG nova.compute.provider_tree [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.932124] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653881, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.940682] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527bea57-ec41-1dbe-45c7-b80ec254f89f, 'name': SearchDatastore_Task, 'duration_secs': 0.023} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.941042] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.941187] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a7a014b9-10e1-45a0-85da-4754051e8d82/a7a014b9-10e1-45a0-85da-4754051e8d82.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 634.941447] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dcf638d-190d-4f5a-aed7-6cc1b4cb9bb1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.945537] env[62974]: DEBUG oslo_concurrency.lockutils [None req-db97563c-4ac9-433f-b7e7-9b58acb5739c tempest-ServerExternalEventsTest-1921310671 tempest-ServerExternalEventsTest-1921310671-project] Releasing lock "refresh_cache-22a0a34a-c46b-4246-9a80-3540550bd793" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.948619] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 634.948619] env[62974]: value = "task-2653884" [ 634.948619] env[62974]: _type = "Task" [ 634.948619] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.957501] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653884, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.121072] env[62974]: DEBUG oslo_vmware.api [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653883, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141277} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.121378] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.121620] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 635.121745] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.121932] env[62974]: INFO nova.compute.manager [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Took 1.12 seconds to destroy the instance on the hypervisor. [ 635.122338] env[62974]: DEBUG oslo.service.loopingcall [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.122457] env[62974]: DEBUG nova.compute.manager [-] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 635.122553] env[62974]: DEBUG nova.network.neutron [-] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.174363] env[62974]: DEBUG nova.compute.manager [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 635.174666] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 635.175740] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0e6c8d-4294-4fca-b7ec-06f810970af2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.184928] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 635.185253] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c14d80ff-7784-4745-95f4-f35dbf3e55dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.191857] env[62974]: DEBUG oslo_vmware.api [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 635.191857] env[62974]: value = "task-2653885" [ 635.191857] env[62974]: _type = "Task" [ 635.191857] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.200478] env[62974]: DEBUG oslo_vmware.api [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653885, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.203867] env[62974]: DEBUG nova.network.neutron [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.301161] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.425942] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653881, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.434926] env[62974]: DEBUG nova.scheduler.client.report [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 635.458166] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653884, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.617680] env[62974]: DEBUG nova.network.neutron [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Updating instance_info_cache with network_info: [{"id": "36d99cd3-daa8-4da2-b43f-85af2aaa66db", "address": "fa:16:3e:d9:17:83", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d99cd3-da", "ovs_interfaceid": "36d99cd3-daa8-4da2-b43f-85af2aaa66db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.638405] env[62974]: DEBUG nova.network.neutron [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updated VIF entry in instance network info cache for port e4309fab-6f6a-4cb4-8401-082b264bf2b9. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 635.638683] env[62974]: DEBUG nova.network.neutron [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updating instance_info_cache with network_info: [{"id": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "address": "fa:16:3e:be:27:61", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4309fab-6f", "ovs_interfaceid": "e4309fab-6f6a-4cb4-8401-082b264bf2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.702700] env[62974]: DEBUG oslo_vmware.api [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653885, 'name': PowerOffVM_Task, 'duration_secs': 0.216952} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.705525] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 635.707018] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 635.707018] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7efe0989-1267-48ea-9358-dc9fa1d58084 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.780731] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 635.780949] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 635.781148] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Deleting the datastore file [datastore2] 22a0a34a-c46b-4246-9a80-3540550bd793 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.781404] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45a301b7-1af8-42ae-a904-a0ecec1bde03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.789159] env[62974]: DEBUG oslo_vmware.api [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for the task: (returnval){ [ 635.789159] env[62974]: value = "task-2653887" [ 635.789159] env[62974]: _type = "Task" [ 635.789159] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.805379] env[62974]: DEBUG oslo_vmware.api [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653887, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.809189] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653880, 'name': ReconfigVM_Task, 'duration_secs': 1.170317} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.809466] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 635.810662] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d74e56dc-71b4-45f9-a12c-7473af95766a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.816524] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 635.816524] env[62974]: value = "task-2653888" [ 635.816524] env[62974]: _type = "Task" [ 635.816524] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.824568] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653888, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.927031] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653881, 'name': ReconfigVM_Task, 'duration_secs': 1.056681} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.927339] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 30fcd64c-4570-454b-a7e5-3246c92d90fc/30fcd64c-4570-454b-a7e5-3246c92d90fc.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 635.928046] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-292a7584-c4ad-4c12-9549-45245182b0bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.934803] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 635.934803] env[62974]: value = "task-2653889" [ 635.934803] env[62974]: _type = "Task" [ 635.934803] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.943975] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.943975] env[62974]: DEBUG nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.948722] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.758s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.950315] env[62974]: INFO nova.compute.claims [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.953714] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653889, 'name': Rename_Task} progress is 0%. 
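The "Inventory has not changed" report a few entries above carries the capacity data behind the "Claim successful" line here: for each resource class, usable capacity works out to (total - reserved) * allocation_ratio. A quick check of those numbers in plain Python, with the values copied from the inventory dict above:

    # Inventory as reported above for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0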
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.966574] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52033} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.966985] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a7a014b9-10e1-45a0-85da-4754051e8d82/a7a014b9-10e1-45a0-85da-4754051e8d82.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.967272] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.967570] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0babafd3-ec21-44a0-941f-f5e00e4ef901 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.973819] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 635.973819] env[62974]: value = "task-2653890" [ 635.973819] env[62974]: _type = "Task" [ 635.973819] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.982737] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.120079] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.120436] env[62974]: DEBUG nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Instance network_info: |[{"id": "36d99cd3-daa8-4da2-b43f-85af2aaa66db", "address": "fa:16:3e:d9:17:83", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d99cd3-da", "ovs_interfaceid": "36d99cd3-daa8-4da2-b43f-85af2aaa66db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 636.120868] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:17:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36d99cd3-daa8-4da2-b43f-85af2aaa66db', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.135147] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating folder: Project (14dd4a9a77ad40458d40bb82ac4b90a3). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.135805] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cb34d78-9171-4e55-b72f-b6333b5a28b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.142875] env[62974]: DEBUG oslo_concurrency.lockutils [req-5a3ecdd2-f6ea-4060-a35a-bae29494020c req-60cb0457-ddb2-4c8d-a734-a40c367d834a service nova] Releasing lock "refresh_cache-b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.147824] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created folder: Project (14dd4a9a77ad40458d40bb82ac4b90a3) in parent group-v535199. [ 636.148017] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating folder: Instances. Parent ref: group-v535283. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.148278] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83152816-06f2-4e28-a298-ea5e41718a2a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.156366] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created folder: Instances in parent group-v535283. [ 636.156600] env[62974]: DEBUG oslo.service.loopingcall [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.156785] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 636.156997] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6293903e-7c9d-4f66-8526-b1b590b8913c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.181270] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.181270] env[62974]: value = "task-2653893" [ 636.181270] env[62974]: _type = "Task" [ 636.181270] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.190467] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653893, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.209415] env[62974]: DEBUG nova.network.neutron [-] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.300232] env[62974]: DEBUG oslo_vmware.api [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Task: {'id': task-2653887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389043} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.300459] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 636.300653] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 636.300826] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 636.301072] env[62974]: INFO nova.compute.manager [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Took 1.13 seconds to destroy the instance on the hypervisor. [ 636.301388] env[62974]: DEBUG oslo.service.loopingcall [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.301670] env[62974]: DEBUG nova.compute.manager [-] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 636.301947] env[62974]: DEBUG nova.network.neutron [-] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.327721] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653888, 'name': Rename_Task, 'duration_secs': 0.138735} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.327721] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 636.328361] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96566899-32f3-4c08-9124-131b10cb46f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.335391] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 636.335391] env[62974]: value = "task-2653894" [ 636.335391] env[62974]: _type = "Task" [ 636.335391] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.346280] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653894, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.445520] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653889, 'name': Rename_Task, 'duration_secs': 0.152052} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.445788] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 636.446099] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76231f44-de87-4baa-87f3-5ea7a4651938 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.452925] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 636.452925] env[62974]: value = "task-2653895" [ 636.452925] env[62974]: _type = "Task" [ 636.452925] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.457163] env[62974]: DEBUG nova.compute.utils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 636.462025] env[62974]: DEBUG nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 636.462025] env[62974]: DEBUG nova.network.neutron [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 636.468698] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.483804] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067927} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.484326] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 636.485089] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0680b1-3f76-43d4-a778-c336065c7c28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.512168] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] a7a014b9-10e1-45a0-85da-4754051e8d82/a7a014b9-10e1-45a0-85da-4754051e8d82.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 636.513036] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-199a9a5c-a751-4693-aff9-6c30bda29698 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.534966] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 636.534966] env[62974]: value = "task-2653896" [ 636.534966] env[62974]: _type = "Task" [ 636.534966] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.539873] env[62974]: DEBUG nova.policy [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7d087c222eb46d5a7f921dcd06a1cd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fa0b4b05efa4c328e4a203e324372c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.547632] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653896, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.557315] env[62974]: DEBUG nova.compute.manager [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 636.557315] env[62974]: DEBUG nova.compute.manager [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing instance network info cache due to event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 636.557315] env[62974]: DEBUG oslo_concurrency.lockutils [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] Acquiring lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.557315] env[62974]: DEBUG oslo_concurrency.lockutils [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] Acquired lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.557315] env[62974]: DEBUG nova.network.neutron [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.691903] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653893, 'name': CreateVM_Task, 'duration_secs': 0.361233} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.691903] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 636.692399] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.692558] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.693082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 636.693144] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13a7713e-693a-414d-bb95-d146fd75086a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.697878] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 636.697878] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5258bcc9-8835-3b60-2c82-914d9610c118" [ 636.697878] env[62974]: _type = "Task" [ 636.697878] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.705357] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5258bcc9-8835-3b60-2c82-914d9610c118, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.712427] env[62974]: INFO nova.compute.manager [-] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Took 1.59 seconds to deallocate network for instance. 
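The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries around the [datastore1] devstack-image-cache_base path in the entries above and below come from oslo.concurrency's lockutils, which the VMware driver uses to serialize work on the shared image cache (the surrounding logic is nova.virt.vmwareapi.vmops._fetch_image_if_missing, referenced later in this log). A minimal sketch of that locking idiom, assuming a hypothetical copy_cached_vmdk() callable; it is not Nova's actual code:

    from oslo_concurrency import lockutils

    # Lock name taken from the log entries above; the body is illustrative only.
    CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
                  '807f8582-499f-47ee-9d5b-755c9f39bc39')

    def fetch_image_if_missing(copy_cached_vmdk):
        # lockutils.lock() is a context manager; entering and leaving it emits the
        # "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG lines above.
        with lockutils.lock(CACHE_LOCK):
            copy_cached_vmdk()  # copy the cached VMDK only if it is not already present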
[ 636.795324] env[62974]: DEBUG nova.compute.manager [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Received event network-changed-36d99cd3-daa8-4da2-b43f-85af2aaa66db {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 636.795499] env[62974]: DEBUG nova.compute.manager [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Refreshing instance network info cache due to event network-changed-36d99cd3-daa8-4da2-b43f-85af2aaa66db. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 636.795716] env[62974]: DEBUG oslo_concurrency.lockutils [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] Acquiring lock "refresh_cache-79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.795878] env[62974]: DEBUG oslo_concurrency.lockutils [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] Acquired lock "refresh_cache-79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.796282] env[62974]: DEBUG nova.network.neutron [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Refreshing network info cache for port 36d99cd3-daa8-4da2-b43f-85af2aaa66db {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.853658] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653894, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.964137] env[62974]: DEBUG nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.969571] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653895, 'name': PowerOnVM_Task} progress is 76%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.047750] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653896, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.053285] env[62974]: DEBUG nova.network.neutron [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Successfully created port: 2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.208205] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5258bcc9-8835-3b60-2c82-914d9610c118, 'name': SearchDatastore_Task, 'duration_secs': 0.010186} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.210402] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.210648] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 637.210882] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.211037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.211223] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.213657] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72158f98-f816-4bb4-9794-2e3318a488f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.220046] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.228075] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.228287] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 637.229057] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe8ccb69-ebff-4e8d-9b91-9074a42c3b09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.239948] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 637.239948] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523728b0-55a5-243d-438f-b6af29726e00" [ 637.239948] env[62974]: _type = "Task" [ 637.239948] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.250618] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523728b0-55a5-243d-438f-b6af29726e00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.317016] env[62974]: DEBUG nova.network.neutron [-] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.348820] env[62974]: DEBUG oslo_vmware.api [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653894, 'name': PowerOnVM_Task, 'duration_secs': 0.676558} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.352129] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 637.352490] env[62974]: DEBUG nova.compute.manager [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 637.354100] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d48a34-3a50-4959-9550-1a0e75eae287 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.458327] env[62974]: DEBUG nova.network.neutron [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updated VIF entry in instance network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 637.458664] env[62974]: DEBUG nova.network.neutron [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [{"id": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "address": "fa:16:3e:cb:fb:0a", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dbab348-e4", "ovs_interfaceid": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.475286] env[62974]: DEBUG oslo_vmware.api [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653895, 'name': PowerOnVM_Task, 'duration_secs': 0.668534} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.480016] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 637.480016] env[62974]: INFO nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Took 9.69 seconds to spawn the instance on the hypervisor. [ 637.480016] env[62974]: DEBUG nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 637.480016] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675c26d5-133b-4da1-914d-8620d13df49d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.549466] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653896, 'name': ReconfigVM_Task, 'duration_secs': 0.551335} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.553114] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Reconfigured VM instance instance-0000001b to attach disk [datastore1] a7a014b9-10e1-45a0-85da-4754051e8d82/a7a014b9-10e1-45a0-85da-4754051e8d82.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 637.563507] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a27156f4-84f1-4f76-ba2a-dc304968efa6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.565836] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Acquiring lock "a14e7e40-afef-4607-8fa9-935a92ea49dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.566096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.572864] 
env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 637.572864] env[62974]: value = "task-2653897" [ 637.572864] env[62974]: _type = "Task" [ 637.572864] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.584637] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9056ef6-16e9-43c8-be21-31dc4ef9144e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.590929] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653897, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.596751] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8024bb50-b6bb-4756-802a-ef503786ab1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.638588] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59179869-46dd-461f-bb12-28f8b0667985 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.648254] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317e9470-e2d4-4811-996b-24d1002cf759 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.662382] env[62974]: DEBUG nova.compute.provider_tree [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.675185] env[62974]: DEBUG nova.network.neutron [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Updated VIF entry in instance network info cache for port 36d99cd3-daa8-4da2-b43f-85af2aaa66db. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 637.675611] env[62974]: DEBUG nova.network.neutron [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Updating instance_info_cache with network_info: [{"id": "36d99cd3-daa8-4da2-b43f-85af2aaa66db", "address": "fa:16:3e:d9:17:83", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d99cd3-da", "ovs_interfaceid": "36d99cd3-daa8-4da2-b43f-85af2aaa66db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.747815] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523728b0-55a5-243d-438f-b6af29726e00, 'name': SearchDatastore_Task, 'duration_secs': 0.009618} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.748622] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7986190e-d56a-43e0-abff-24d08d95ed7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.754259] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 637.754259] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d4fb48-8be5-3584-f1b8-0f2c4a03f6e1" [ 637.754259] env[62974]: _type = "Task" [ 637.754259] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.762314] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d4fb48-8be5-3584-f1b8-0f2c4a03f6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.822172] env[62974]: INFO nova.compute.manager [-] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Took 1.52 seconds to deallocate network for instance. 
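The recurring "Invoking <X>_Task", "Waiting for the task", "progress is N%" and "completed successfully" entries throughout this section (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, PowerOnVM_Task, SearchDatastore_Task, ...) are produced by oslo.vmware's session layer: the driver invokes a task-returning vSphere method and then blocks in wait_for_task(), which polls the task until it succeeds or fails. A hedged sketch of that pattern, with a placeholder vCenter endpoint and credentials (the real values come from nova.conf) and a VM looked up only for illustration:

    from oslo_vmware import api, vim_util

    # host, username, password, api_retry_count, task_poll_interval (seconds)
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret', 10, 0.5)

    # Fetch one VM reference via the PropertyCollector -- the same mechanism behind
    # the RetrievePropertiesEx calls seen in this log.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    vm_ref = result.objects[0].obj

    # Invoke a task-returning method, then poll it to completion; wait_for_task()
    # is what emits the "progress is N%" / "completed successfully" entries.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

Nova wraps this in helpers such as nova.virt.vmwareapi.vm_util.power_on_instance(), which is what the "Powering on the VM" / "Powered on the VM" lines in this section correspond to.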
[ 637.878122] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.968054] env[62974]: DEBUG oslo_concurrency.lockutils [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] Releasing lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.968379] env[62974]: DEBUG nova.compute.manager [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 637.968632] env[62974]: DEBUG nova.compute.manager [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing instance network info cache due to event network-changed-7dbab348-e4dd-46db-ae81-292fbfcd16dc. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 637.968865] env[62974]: DEBUG oslo_concurrency.lockutils [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] Acquiring lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.969034] env[62974]: DEBUG oslo_concurrency.lockutils [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] Acquired lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.969244] env[62974]: DEBUG nova.network.neutron [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Refreshing network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 637.979582] env[62974]: DEBUG nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 638.003507] env[62974]: INFO nova.compute.manager [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Took 39.09 seconds to build instance. 
[ 638.009274] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 638.009511] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.009674] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 638.009837] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.010131] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 638.010131] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 638.010332] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 638.010491] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 638.010645] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 638.010803] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 638.010974] env[62974]: DEBUG nova.virt.hardware [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 638.011861] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f651bfff-4551-439b-b3a5-e30446f50647 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.021150] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bde22a-9f51-46e6-8d30-b1ce9a51c7f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.083209] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653897, 'name': Rename_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.166575] env[62974]: DEBUG nova.scheduler.client.report [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 638.177809] env[62974]: DEBUG oslo_concurrency.lockutils [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] Releasing lock "refresh_cache-79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.178140] env[62974]: DEBUG nova.compute.manager [req-2092411e-d06b-486e-b72b-c1a4066c92c6 req-a041656e-bf95-476d-a936-0ba54dc73518 service nova] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Received event network-vif-deleted-e4309fab-6f6a-4cb4-8401-082b264bf2b9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 638.265119] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d4fb48-8be5-3584-f1b8-0f2c4a03f6e1, 'name': SearchDatastore_Task, 'duration_secs': 0.009196} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.265520] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.265847] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7/79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 638.266195] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a97e00c-178c-4575-b099-0dc90d076a44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.273875] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 638.273875] env[62974]: value = "task-2653898" [ 638.273875] env[62974]: _type = "Task" [ 638.273875] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.277254] env[62974]: INFO nova.compute.manager [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Rebuilding instance [ 638.283781] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653898, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.317774] env[62974]: DEBUG nova.compute.manager [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.318695] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adfd068-24a1-4f2f-a2e6-fe88fa46f149 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.329194] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.506114] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bdda40d-ae8e-4e69-8d5f-0d27e709d457 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.002s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.585859] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653897, 'name': Rename_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.672997] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "30fcd64c-4570-454b-a7e5-3246c92d90fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.672997] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.672997] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "30fcd64c-4570-454b-a7e5-3246c92d90fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.672997] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.673480] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.674413] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.675121] env[62974]: DEBUG nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 638.679869] env[62974]: INFO nova.compute.manager [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Terminating instance [ 638.686017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.199s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.687999] env[62974]: INFO nova.compute.claims [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.788410] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653898, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47646} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.788745] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7/79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 638.788954] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 638.789247] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fdda7819-92ad-46b8-9bf8-8501adc1102a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.795578] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 638.795578] env[62974]: value = "task-2653899" [ 638.795578] env[62974]: _type = "Task" [ 638.795578] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.804499] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653899, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.805416] env[62974]: DEBUG nova.network.neutron [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updated VIF entry in instance network info cache for port 7dbab348-e4dd-46db-ae81-292fbfcd16dc. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 638.805845] env[62974]: DEBUG nova.network.neutron [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [{"id": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "address": "fa:16:3e:cb:fb:0a", "network": {"id": "22fd8460-1d41-493c-97b4-7f1ccf0c42ae", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1507003379-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca220df51dc0414ea400a56fe5e49e1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88eedc4b-66dc-4845-9f95-858d6db12a7f", "external-id": "nsx-vlan-transportzone-999", "segmentation_id": 999, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dbab348-e4", "ovs_interfaceid": "7dbab348-e4dd-46db-ae81-292fbfcd16dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.828683] env[62974]: DEBUG nova.compute.manager [req-71574140-42ed-4fc7-a230-4e767e59e313 req-598f68f5-73f9-45a5-9968-4ae2cf22c5c4 service nova] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Received event network-vif-deleted-22854a7f-ed93-414f-9a4b-b5b486459cc8 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 638.931837] env[62974]: DEBUG nova.network.neutron [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Successfully updated port: 2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 639.008080] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 639.083482] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653897, 'name': Rename_Task, 'duration_secs': 1.289123} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.083750] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 639.083994] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd767a59-0162-4980-9050-cba76ce95ad0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.091112] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 639.091112] env[62974]: value = "task-2653900" [ 639.091112] env[62974]: _type = "Task" [ 639.091112] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.098192] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653900, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.199065] env[62974]: DEBUG nova.compute.utils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 639.200201] env[62974]: DEBUG nova.compute.manager [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 639.200385] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 639.200968] env[62974]: DEBUG nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Not allocating networking since 'none' was specified. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 639.201611] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9ff6a7-1f0a-4415-8436-bacdef718c68 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.209708] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 639.209963] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea0d0266-57d7-4227-99a6-46a6d8f5e0fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.218139] env[62974]: DEBUG oslo_vmware.api [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 639.218139] env[62974]: value = "task-2653901" [ 639.218139] env[62974]: _type = "Task" [ 639.218139] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.226732] env[62974]: DEBUG oslo_vmware.api [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653901, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.306950] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06652} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.307467] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 639.308554] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a23ad0-c11f-4450-aa0f-fcf6ceb1e0bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.311671] env[62974]: DEBUG oslo_concurrency.lockutils [req-c14f2022-bc8f-41aa-a10b-43e2cd4f891b req-36b59eae-a33f-46d4-abcc-07b2b39ef7e5 service nova] Releasing lock "refresh_cache-a63aa120-1c7b-4abc-93cf-4d138f5cebde" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.333642] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7/79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 639.334237] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 639.334458] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9627944-65e8-4b95-9117-acdeac8d788d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.349488] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ad562d2-d9a6-4ed1-a0b7-4fcd220b8cb4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.356761] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 639.356761] env[62974]: value = "task-2653902" [ 639.356761] env[62974]: _type = "Task" [ 639.356761] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.358164] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 639.358164] env[62974]: value = "task-2653903" [ 639.358164] env[62974]: _type = "Task" [ 639.358164] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.374223] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.374534] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653903, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.439968] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "refresh_cache-605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.439968] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquired lock "refresh_cache-605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.439968] env[62974]: DEBUG nova.network.neutron [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 639.535055] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.604268] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653900, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.705124] env[62974]: DEBUG nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 639.735790] env[62974]: DEBUG oslo_vmware.api [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653901, 'name': PowerOffVM_Task, 'duration_secs': 0.300291} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.736094] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 639.736276] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 639.736524] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bbfc210-9ad9-4180-8a56-8e2e999664ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.809648] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 639.809878] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 639.810092] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Deleting the datastore file [datastore1] 30fcd64c-4570-454b-a7e5-3246c92d90fc {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.813203] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89232dbf-3e6d-48fc-b6e4-bd703c0e5fc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.819920] env[62974]: DEBUG oslo_vmware.api [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for the task: (returnval){ [ 639.819920] env[62974]: value = "task-2653905" [ 639.819920] env[62974]: _type = "Task" [ 639.819920] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.828553] env[62974]: DEBUG oslo_vmware.api [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653905, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.870729] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653902, 'name': PowerOffVM_Task, 'duration_secs': 0.185088} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.877089] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 639.877476] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 639.877660] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653903, 'name': ReconfigVM_Task, 'duration_secs': 0.444264} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.878577] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251cdffc-f07e-40a3-b2a9-8fe0f11d55d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.881239] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7/79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 639.882164] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff7083ef-797e-4ae2-9ab5-665b905d8440 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.888232] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 639.889480] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2cbc6880-a00d-454a-8f19-39e27247a80b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.891338] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 639.891338] env[62974]: value = "task-2653906" [ 639.891338] env[62974]: _type = 
"Task" [ 639.891338] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.902883] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653906, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.925745] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 639.926126] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 639.926416] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Deleting the datastore file [datastore1] 41f20cb7-c9f9-4201-ae16-4f977dae26cf {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.926837] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de325111-6d2c-476b-bafc-37cc62736e5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.935548] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 639.935548] env[62974]: value = "task-2653908" [ 639.935548] env[62974]: _type = "Task" [ 639.935548] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.945400] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.985196] env[62974]: DEBUG nova.network.neutron [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.105900] env[62974]: DEBUG oslo_vmware.api [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653900, 'name': PowerOnVM_Task, 'duration_secs': 0.908623} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.109571] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 640.109775] env[62974]: INFO nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Took 9.55 seconds to spawn the instance on the hypervisor. [ 640.109947] env[62974]: DEBUG nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.111060] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8913e427-d501-49d8-b66c-af8953483886 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.197187] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab31667-cc6b-4504-a216-da69d80fe924 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.206377] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bd637e-0e71-4b71-aa7a-66db1da8ce01 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.245336] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2b088d-b5ea-4f87-96f8-6dcd6b6b4314 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.253564] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b6d9f6-b142-4f98-9440-8db6cad716ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.269082] env[62974]: DEBUG nova.compute.provider_tree [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.329412] env[62974]: DEBUG oslo_vmware.api [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Task: {'id': task-2653905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144799} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.329713] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.329822] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 640.330494] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.330494] env[62974]: INFO nova.compute.manager [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 640.330494] env[62974]: DEBUG oslo.service.loopingcall [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.330650] env[62974]: DEBUG nova.compute.manager [-] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 640.330861] env[62974]: DEBUG nova.network.neutron [-] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 640.370566] env[62974]: DEBUG nova.network.neutron [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Updating instance_info_cache with network_info: [{"id": "2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909", "address": "fa:16:3e:1c:1f:8d", "network": {"id": "bf6a4921-9347-4bcc-958d-4ea747c93741", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1305353004-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fa0b4b05efa4c328e4a203e324372c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a54c79e-87", "ovs_interfaceid": "2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.404153] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653906, 'name': Rename_Task, 'duration_secs': 0.178133} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.404420] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 640.404651] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-343abe8b-a82e-43be-9d13-f82f418a8c8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.412842] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 640.412842] env[62974]: value = "task-2653909" [ 640.412842] env[62974]: _type = "Task" [ 640.412842] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.421586] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653909, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.445883] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104585} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.446234] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.446462] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 640.446662] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.631407] env[62974]: INFO nova.compute.manager [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Took 40.15 seconds to build instance. [ 640.715761] env[62974]: DEBUG nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 640.740832] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 640.741077] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.741241] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 640.741426] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.741568] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 640.741708] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 640.741912] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 640.742090] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 640.742339] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 
tempest-ServerShowV247Test-319425169-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 640.742430] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 640.742596] env[62974]: DEBUG nova.virt.hardware [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 640.743489] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319afc55-f787-4a70-9209-1aabca53c534 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.751084] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7125d26-92ab-4ce1-9a87-ef13bfc35882 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.764417] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.770492] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Creating folder: Project (ceaad4beb0a845b4929981e562c659d4). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.770492] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3f6d3d0-2fec-481b-a76d-a33c7c405807 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.772696] env[62974]: DEBUG nova.scheduler.client.report [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 640.784600] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Created folder: Project (ceaad4beb0a845b4929981e562c659d4) in parent group-v535199. 
[ 640.785240] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Creating folder: Instances. Parent ref: group-v535286. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.785240] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-352ff5f7-1ef1-4e3a-ae67-4ba7a462e783 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.794419] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Created folder: Instances in parent group-v535286. [ 640.794659] env[62974]: DEBUG oslo.service.loopingcall [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.794880] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.795102] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2d05655-d7c5-403c-bab3-182700660586 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.812579] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.812579] env[62974]: value = "task-2653912" [ 640.812579] env[62974]: _type = "Task" [ 640.812579] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.820570] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653912, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.878882] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Releasing lock "refresh_cache-605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.879507] env[62974]: DEBUG nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Instance network_info: |[{"id": "2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909", "address": "fa:16:3e:1c:1f:8d", "network": {"id": "bf6a4921-9347-4bcc-958d-4ea747c93741", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1305353004-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fa0b4b05efa4c328e4a203e324372c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a54c79e-87", "ovs_interfaceid": "2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 640.882131] env[62974]: DEBUG nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Received event network-vif-plugged-2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 640.882471] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] Acquiring lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.882948] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.883224] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.883523] env[62974]: DEBUG nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] No waiting events found dispatching network-vif-plugged-2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 640.883959] env[62974]: WARNING nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Received unexpected event network-vif-plugged-2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 for instance with vm_state building and task_state spawning. [ 640.884149] env[62974]: DEBUG nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Received event network-changed-2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 640.884408] env[62974]: DEBUG nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Refreshing instance network info cache due to event network-changed-2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 640.884777] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] Acquiring lock "refresh_cache-605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.885078] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] Acquired lock "refresh_cache-605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.885407] env[62974]: DEBUG nova.network.neutron [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Refreshing network info cache for port 2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 640.887727] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:1f:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ef746c57-cd18-4883-a0e9-c52937aaf41d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.901112] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Creating folder: Project (0fa0b4b05efa4c328e4a203e324372c3). 
Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.902774] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0dd67579-114c-41e0-b076-f20e10017dbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.912450] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Created folder: Project (0fa0b4b05efa4c328e4a203e324372c3) in parent group-v535199. [ 640.912647] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Creating folder: Instances. Parent ref: group-v535289. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.913136] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc9d2ed2-5ae3-42c9-957b-c628424f93c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.914903] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "a7a014b9-10e1-45a0-85da-4754051e8d82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.923309] env[62974]: DEBUG oslo_vmware.api [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653909, 'name': PowerOnVM_Task, 'duration_secs': 0.481454} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.923593] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 640.923870] env[62974]: INFO nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Took 7.71 seconds to spawn the instance on the hypervisor. 
[ 640.923966] env[62974]: DEBUG nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.925671] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83edd358-5a18-441e-a32c-2a299476ebcf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.928382] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Created folder: Instances in parent group-v535289. [ 640.928382] env[62974]: DEBUG oslo.service.loopingcall [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.928510] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.929068] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36b63a98-5418-4ce1-b7cf-b2fdf8642142 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.953473] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.953473] env[62974]: value = "task-2653915" [ 640.953473] env[62974]: _type = "Task" [ 640.953473] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.968591] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653915, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.065214] env[62974]: DEBUG nova.network.neutron [-] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.133507] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0f171cc5-1222-4754-b415-ed8635249831 tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.773s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.135095] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.220s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.135335] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "a7a014b9-10e1-45a0-85da-4754051e8d82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.135561] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.135781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.138992] env[62974]: INFO nova.compute.manager [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Terminating instance [ 641.277928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.278495] env[62974]: DEBUG nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 
tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 641.281674] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.563s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.281873] env[62974]: DEBUG nova.objects.instance [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 641.326600] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653912, 'name': CreateVM_Task, 'duration_secs': 0.344836} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.326790] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.327447] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.327447] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.327726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 641.327980] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d1aa210-f3ae-494e-9f34-0ed6f85572d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.333036] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 641.333036] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c97357-f74d-959b-0f10-d4f3590ba99b" [ 641.333036] env[62974]: _type = "Task" [ 641.333036] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.341515] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c97357-f74d-959b-0f10-d4f3590ba99b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.461367] env[62974]: INFO nova.compute.manager [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Took 38.89 seconds to build instance. [ 641.468303] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653915, 'name': CreateVM_Task, 'duration_secs': 0.378918} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.468738] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.469342] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.487887] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 641.488453] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.488648] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 641.488847] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 
tempest-ServersAdmin275Test-246102671-project-admin] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.488998] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 641.489162] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 641.489401] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 641.489579] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 641.489759] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 641.489926] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 641.490117] env[62974]: DEBUG nova.virt.hardware [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 641.491201] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa8877c-9c63-4d3f-8f36-f91caaa78af9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.503069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057a538b-317e-4a36-ab5a-0a48c81aa983 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.519220] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.524100] env[62974]: DEBUG oslo.service.loopingcall [None req-43e4bc90-5cae-4810-87d5-c330da840dfd 
tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.526073] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 641.526348] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7ee3615-19ec-4920-8654-ae51965bc6c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.544974] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.544974] env[62974]: value = "task-2653916" [ 641.544974] env[62974]: _type = "Task" [ 641.544974] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.556214] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653916, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.569534] env[62974]: INFO nova.compute.manager [-] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Took 1.24 seconds to deallocate network for instance. [ 641.643026] env[62974]: DEBUG nova.compute.manager [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 641.643176] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 641.643547] env[62974]: DEBUG nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 641.647213] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6872841e-c974-4edf-958a-7c057df7dd79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.655153] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 641.655400] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ae8322a-912e-4985-8681-86c362b71b9c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.661998] env[62974]: DEBUG oslo_vmware.api [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 641.661998] env[62974]: value = "task-2653917" [ 641.661998] env[62974]: _type = "Task" [ 641.661998] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.662779] env[62974]: DEBUG nova.network.neutron [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Updated VIF entry in instance network info cache for port 2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 641.663117] env[62974]: DEBUG nova.network.neutron [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Updating instance_info_cache with network_info: [{"id": "2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909", "address": "fa:16:3e:1c:1f:8d", "network": {"id": "bf6a4921-9347-4bcc-958d-4ea747c93741", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1305353004-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fa0b4b05efa4c328e4a203e324372c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a54c79e-87", "ovs_interfaceid": "2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.672882] env[62974]: DEBUG oslo_vmware.api [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 
tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653917, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.788049] env[62974]: DEBUG nova.compute.utils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.792839] env[62974]: DEBUG nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Not allocating networking since 'none' was specified. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 641.844696] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c97357-f74d-959b-0f10-d4f3590ba99b, 'name': SearchDatastore_Task, 'duration_secs': 0.011721} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.844835] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.845046] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.845324] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.845524] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.845799] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.846126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 
tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.846677] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 641.846755] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dd61733-db04-47ff-8dc6-143d300e1916 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.848652] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73694741-ede7-4abd-8d94-aa6e5756d481 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.854900] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 641.854900] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5236ba44-53ed-2cac-7677-d5c214ceb254" [ 641.854900] env[62974]: _type = "Task" [ 641.854900] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.859308] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.859308] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 641.860214] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bcac503-32f6-4140-b949-b898c349ae3e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.867552] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5236ba44-53ed-2cac-7677-d5c214ceb254, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.871317] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 641.871317] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c3926f-2645-25ee-faa9-0b56d840dec3" [ 641.871317] env[62974]: _type = "Task" [ 641.871317] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.880421] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c3926f-2645-25ee-faa9-0b56d840dec3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.969384] env[62974]: DEBUG oslo_concurrency.lockutils [None req-efb08e5f-8937-4b3b-829a-d1ddcbfdc46e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.331s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.055905] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653916, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.078886] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.168672] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.169170] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] Releasing lock "refresh_cache-605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.169403] env[62974]: DEBUG nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Received event network-vif-deleted-aa8b790d-e5e2-42e7-bb13-826c844d11bc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 642.169592] env[62974]: INFO nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Neutron deleted interface aa8b790d-e5e2-42e7-bb13-826c844d11bc; detaching it from the 
instance and deleting it from the info cache [ 642.169760] env[62974]: DEBUG nova.network.neutron [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.176764] env[62974]: DEBUG oslo_vmware.api [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653917, 'name': PowerOffVM_Task, 'duration_secs': 0.187093} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.176764] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 642.176898] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 642.177599] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c269c2c7-89e0-4e72-9114-14950ee466c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.234516] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 642.235068] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 642.235068] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Deleting the datastore file [datastore1] a7a014b9-10e1-45a0-85da-4754051e8d82 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 642.235258] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-080b664d-8cfb-40e8-bafd-13ba0a73dea1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.241876] env[62974]: DEBUG oslo_vmware.api [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for the task: (returnval){ [ 642.241876] env[62974]: value = "task-2653919" [ 642.241876] env[62974]: _type = "Task" [ 642.241876] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.249561] env[62974]: DEBUG oslo_vmware.api [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.293343] env[62974]: DEBUG nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 642.297026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6453d09-0349-4489-ad9f-8f9213e3085b tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.298258] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.895s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.298434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.298584] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 642.298854] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.453s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.299068] env[62974]: DEBUG nova.objects.instance [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lazy-loading 'resources' on Instance uuid 1873faa1-dec2-4d17-a71a-c53fea50c09b {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 642.300944] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03db202d-97bf-489c-8b0c-e72a32b252fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.310559] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd96a767-5aeb-4db4-a8f5-5b2a6eac076c {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.326359] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba44fa5-cf53-4493-b30d-2e3813865edd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.333303] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4882d357-2c8d-42a6-86c6-23f36463785b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.366072] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178965MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 642.366368] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.375191] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5236ba44-53ed-2cac-7677-d5c214ceb254, 'name': SearchDatastore_Task, 'duration_secs': 0.018514} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.378902] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.378902] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.379058] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.384701] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c3926f-2645-25ee-faa9-0b56d840dec3, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.385532] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db502f07-e9de-4e25-bf4e-72de43c0a4c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.390813] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 642.390813] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527c734d-805d-0882-6c7f-34e04be968a0" [ 642.390813] env[62974]: _type = "Task" [ 642.390813] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.398677] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527c734d-805d-0882-6c7f-34e04be968a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.472597] env[62974]: DEBUG nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.541517] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.541862] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.542170] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.542428] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.542658] env[62974]: DEBUG 
oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.545258] env[62974]: INFO nova.compute.manager [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Terminating instance [ 642.560047] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653916, 'name': CreateVM_Task, 'duration_secs': 1.000403} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.560205] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 642.560606] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.560731] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.561052] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.561294] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b9d91ef-851a-404f-85d0-b6d5c5edcf82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.566151] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 642.566151] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f667d6-45bb-7f99-2840-c365bd87a046" [ 642.566151] env[62974]: _type = "Task" [ 642.566151] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.573880] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f667d6-45bb-7f99-2840-c365bd87a046, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.673107] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51c2b222-41c7-47e8-afe1-a1c9a2cf97e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.682381] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35c258e-b2dc-4868-bc5c-c3b2a4f237a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.715695] env[62974]: DEBUG nova.compute.manager [req-e1f73d12-613c-4b45-9d2b-ef44d2e641ad req-cabb3b98-8a76-4d50-a06b-8dde70585bfb service nova] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Detach interface failed, port_id=aa8b790d-e5e2-42e7-bb13-826c844d11bc, reason: Instance 30fcd64c-4570-454b-a7e5-3246c92d90fc could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 642.752220] env[62974]: DEBUG oslo_vmware.api [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Task: {'id': task-2653919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172777} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.752573] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 642.752803] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 642.753025] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 642.753245] env[62974]: INFO nova.compute.manager [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Took 1.11 seconds to destroy the instance on the hypervisor. [ 642.753541] env[62974]: DEBUG oslo.service.loopingcall [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 642.753767] env[62974]: DEBUG nova.compute.manager [-] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 642.753887] env[62974]: DEBUG nova.network.neutron [-] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 642.910925] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527c734d-805d-0882-6c7f-34e04be968a0, 'name': SearchDatastore_Task, 'duration_secs': 0.012079} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.911284] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.911622] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 5bc466fb-eebb-40b1-ba09-614a25782ecd/5bc466fb-eebb-40b1-ba09-614a25782ecd.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.915340] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.915625] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.915911] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4a9699e-60a1-47f5-8250-5d0da6019579 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.918768] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb5a5472-66a5-428c-8c04-e08189fa0cea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.927481] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 
642.927481] env[62974]: value = "task-2653920" [ 642.927481] env[62974]: _type = "Task" [ 642.927481] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.931144] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.931281] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.934735] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9343d756-66e0-42a7-8f56-90f49ba4fda0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.940841] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.943887] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 642.943887] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52057c6e-429c-2e08-542b-94be84cfa8d8" [ 642.943887] env[62974]: _type = "Task" [ 642.943887] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.955014] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52057c6e-429c-2e08-542b-94be84cfa8d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.000258] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.054233] env[62974]: DEBUG nova.compute.manager [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 643.054313] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 643.055248] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5efc11-cf96-4b45-ab09-25a007000b19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.067048] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 643.067048] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14e87bcb-78fc-41e3-8587-3ddadcbd7cf8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.079099] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f667d6-45bb-7f99-2840-c365bd87a046, 'name': SearchDatastore_Task, 'duration_secs': 0.009063} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.083551] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.083847] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.084074] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.084373] env[62974]: DEBUG oslo_vmware.api [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 643.084373] env[62974]: value = "task-2653921" [ 643.084373] env[62974]: _type = "Task" [ 643.084373] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.086832] env[62974]: DEBUG nova.compute.manager [req-92187d8d-906f-4597-b924-474eb71eb7ea req-bd7f57de-3e22-4fe9-830c-633a1c76c3be service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Received event network-vif-deleted-0b68ad10-900f-4830-8982-2ad39bf5724d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 643.087044] env[62974]: INFO nova.compute.manager [req-92187d8d-906f-4597-b924-474eb71eb7ea req-bd7f57de-3e22-4fe9-830c-633a1c76c3be service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Neutron deleted interface 0b68ad10-900f-4830-8982-2ad39bf5724d; detaching it from the instance and deleting it from the info cache [ 643.088173] env[62974]: DEBUG nova.network.neutron [req-92187d8d-906f-4597-b924-474eb71eb7ea req-bd7f57de-3e22-4fe9-830c-633a1c76c3be service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.101283] env[62974]: DEBUG oslo_vmware.api [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653921, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.312495] env[62974]: DEBUG nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 643.347500] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 643.347816] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.348380] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 643.348721] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.349077] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 643.349135] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 643.349464] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 643.349655] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 643.349877] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 
tempest-ServerShowV257Test-520911113-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 643.350095] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 643.350312] env[62974]: DEBUG nova.virt.hardware [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 643.351559] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2304ef0f-fafc-49bb-8b8b-002713d60730 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.362657] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d10eca-24e7-48ff-93df-146ae7260cf1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.383309] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.389777] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Creating folder: Project (66df039ee5a94fd590995e671b5ccaca). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.394999] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fbe096a-6597-4fee-95b9-5e5d473b68f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.404087] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Created folder: Project (66df039ee5a94fd590995e671b5ccaca) in parent group-v535199. [ 643.404087] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Creating folder: Instances. Parent ref: group-v535293. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.405102] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49e6e00c-1924-4fa4-94d1-75e6fda34716 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.415663] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Created folder: Instances in parent group-v535293. 
[ 643.416206] env[62974]: DEBUG oslo.service.loopingcall [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.416500] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.416684] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5afae4e-e190-477a-9e01-47ccb8b1687f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.441297] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507455} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.442621] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 5bc466fb-eebb-40b1-ba09-614a25782ecd/5bc466fb-eebb-40b1-ba09-614a25782ecd.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.442999] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.443242] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.443242] env[62974]: value = "task-2653924" [ 643.443242] env[62974]: _type = "Task" [ 643.443242] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.443506] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3a5bb8e-2ab8-4009-b4c0-d2835e0106ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.463112] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653924, 'name': CreateVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.468823] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 643.468823] env[62974]: value = "task-2653925" [ 643.468823] env[62974]: _type = "Task" [ 643.468823] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.469037] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52057c6e-429c-2e08-542b-94be84cfa8d8, 'name': SearchDatastore_Task, 'duration_secs': 0.01042} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.476024] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdb40141-ff38-4144-ab45-ef791f196650 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.483736] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653925, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.486237] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 643.486237] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520f206a-48ab-e316-b567-3680fc859935" [ 643.486237] env[62974]: _type = "Task" [ 643.486237] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.487247] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc94a86f-3796-48f4-a485-d610747f7148 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.502111] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be3d846-637f-4092-966a-acbfe221b782 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.505658] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520f206a-48ab-e316-b567-3680fc859935, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.537197] env[62974]: DEBUG nova.network.neutron [-] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.539163] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ff0914-3101-4567-b53e-45c0a56b9bcf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.547520] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89baba08-6b87-4b10-ad66-4c51ebea33e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.562563] env[62974]: DEBUG nova.compute.provider_tree [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.595923] env[62974]: DEBUG oslo_vmware.api [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653921, 'name': PowerOffVM_Task, 'duration_secs': 0.243154} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.596146] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35455cdc-5c59-4efe-b878-26047fdee09a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.598313] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 643.598459] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 643.598692] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d55c3fcf-5cca-4747-8575-94c9db5457b0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.608710] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7456858-11dd-4304-91fa-257fa0e42cba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.647271] env[62974]: DEBUG nova.compute.manager [req-92187d8d-906f-4597-b924-474eb71eb7ea req-bd7f57de-3e22-4fe9-830c-633a1c76c3be service nova] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Detach interface failed, port_id=0b68ad10-900f-4830-8982-2ad39bf5724d, reason: Instance a7a014b9-10e1-45a0-85da-4754051e8d82 could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 643.678223] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 643.678464] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 643.678644] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleting the datastore file [datastore1] 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 643.678944] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21e61e40-cc59-46c7-a81d-330c5c430077 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.685389] env[62974]: DEBUG oslo_vmware.api [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 643.685389] env[62974]: value = "task-2653927" [ 643.685389] env[62974]: _type = "Task" [ 643.685389] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.693660] env[62974]: DEBUG oslo_vmware.api [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653927, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.961686] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653924, 'name': CreateVM_Task, 'duration_secs': 0.283183} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.962159] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 643.963733] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.964285] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.964859] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 643.965463] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa415e93-46fe-498a-bd0e-f09dedb227fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.970267] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 643.970267] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52455c83-d78e-f845-e6f9-f2c4f8d7298d" [ 643.970267] env[62974]: _type = "Task" [ 643.970267] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.981871] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52455c83-d78e-f845-e6f9-f2c4f8d7298d, 'name': SearchDatastore_Task, 'duration_secs': 0.009142} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.984716] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.984988] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.985247] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.985661] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069424} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.985876] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.986650] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b72b02-96ed-4e65-bc35-cc01ea2b3958 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.010089] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 5bc466fb-eebb-40b1-ba09-614a25782ecd/5bc466fb-eebb-40b1-ba09-614a25782ecd.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.010724] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05b629a3-7cee-4ba9-a30a-b8ebf9c47958 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.030272] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520f206a-48ab-e316-b567-3680fc859935, 'name': SearchDatastore_Task, 'duration_secs': 0.02036} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.031135] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.031256] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245/605b1e4c-9bd7-41cd-b5fe-05dd5d7af245.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 644.031530] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.031816] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.031956] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4491d6ac-4632-4ae4-b4cb-7bd41030f48d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.034919] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d92c4d4-5255-45ce-8cab-8895fde1f299 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.036647] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 644.036647] env[62974]: value = "task-2653928" [ 644.036647] env[62974]: _type = "Task" [ 644.036647] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.040539] env[62974]: INFO nova.compute.manager [-] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Took 1.29 seconds to deallocate network for instance. [ 644.046620] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 644.046620] env[62974]: value = "task-2653929" [ 644.046620] env[62974]: _type = "Task" [ 644.046620] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.048706] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.048706] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.054708] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e96cb5-294a-4a03-8f68-4b740e267313 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.057234] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.063812] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 644.063812] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523097d8-04d2-5eba-d3d4-92cacb956848" [ 644.063812] env[62974]: _type = "Task" [ 644.063812] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.064440] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.065354] env[62974]: DEBUG nova.scheduler.client.report [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 644.077066] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523097d8-04d2-5eba-d3d4-92cacb956848, 'name': SearchDatastore_Task, 'duration_secs': 0.008368} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.077852] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c8207cf-d346-41db-819a-75b1c5bc781b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.083564] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 644.083564] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529bcade-9302-3315-b5a4-d5b4d77dd3ba" [ 644.083564] env[62974]: _type = "Task" [ 644.083564] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.094557] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529bcade-9302-3315-b5a4-d5b4d77dd3ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008942} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.094884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.095498] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 644.095498] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.095695] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.096095] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87414c71-f968-4d5c-be18-c2cd75b0ce77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.097772] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f152b4c-ee52-44ee-ab48-7288d64b0289 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.105192] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 644.105192] env[62974]: value = "task-2653930" [ 644.105192] env[62974]: _type = "Task" [ 644.105192] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.109426] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.109593] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.110592] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50d4ff5b-be54-4e6e-9bc8-6dc1ffdece3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.115607] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.118349] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 644.118349] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52740849-04b5-4a44-7ee1-1e47decacb2f" [ 644.118349] env[62974]: _type = "Task" [ 644.118349] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.125281] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52740849-04b5-4a44-7ee1-1e47decacb2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.199017] env[62974]: DEBUG oslo_vmware.api [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2653927, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186208} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.199017] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 644.199017] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 644.199017] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 644.199017] env[62974]: INFO nova.compute.manager [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 644.199483] env[62974]: DEBUG oslo.service.loopingcall [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.199483] env[62974]: DEBUG nova.compute.manager [-] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 644.199483] env[62974]: DEBUG nova.network.neutron [-] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 644.546884] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653928, 'name': ReconfigVM_Task, 'duration_secs': 0.388836} completed successfully.
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.547879] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.548204] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 5bc466fb-eebb-40b1-ba09-614a25782ecd/5bc466fb-eebb-40b1-ba09-614a25782ecd.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 644.548983] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebbe1cbe-de53-4d43-8f12-1d2f76f39726 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.558289] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.558566] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.562419] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 644.562419] env[62974]: value = "task-2653931" [ 644.562419] env[62974]: _type = "Task" [ 644.562419] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.566147] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458595} completed successfully.
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.569846] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245/605b1e4c-9bd7-41cd-b5fe-05dd5d7af245.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 644.570203] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 644.570439] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad3b988c-dacd-4475-8ecd-218248c95a63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.573553] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.275s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.576302] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.705s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.576999] env[62974]: DEBUG nova.objects.instance [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lazy-loading 'resources' on Instance uuid ecde0e49-c344-4003-b858-8312c1ac344f {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 644.581951] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653931, 'name': Rename_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.583835] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 644.583835] env[62974]: value = "task-2653932" [ 644.583835] env[62974]: _type = "Task" [ 644.583835] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.596725] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653932, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.598202] env[62974]: INFO nova.scheduler.client.report [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted allocations for instance 1873faa1-dec2-4d17-a71a-c53fea50c09b [ 644.614967] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653930, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.629725] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52740849-04b5-4a44-7ee1-1e47decacb2f, 'name': SearchDatastore_Task, 'duration_secs': 0.017142} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.630237] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cff29254-d754-4a96-8b08-879853656d41 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.635657] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 644.635657] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219e73a-cc68-1360-398d-3be588b763f1" [ 644.635657] env[62974]: _type = "Task" [ 644.635657] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.643674] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219e73a-cc68-1360-398d-3be588b763f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.817017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.817259] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.980017] env[62974]: DEBUG nova.network.neutron [-] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.076150] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653931, 'name': Rename_Task, 'duration_secs': 0.291775} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.076436] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.076671] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8cbf40d9-d987-4ef0-8381-05e0c71a2880 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.086129] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 645.086129] env[62974]: value = "task-2653933" [ 645.086129] env[62974]: _type = "Task" [ 645.086129] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.096195] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653932, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248581} completed successfully.
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.099139] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.099445] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653933, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.100170] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1aa6f96-a53b-408c-a3c6-ff0712f3ca44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.126620] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245/605b1e4c-9bd7-41cd-b5fe-05dd5d7af245.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.127261] env[62974]: DEBUG oslo_concurrency.lockutils [None req-601f0bae-1019-4387-a94f-ddb49816658c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "1873faa1-dec2-4d17-a71a-c53fea50c09b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.859s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.134721] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6286e784-78b6-464d-8e08-fbe2829f96ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.149683] env[62974]: DEBUG nova.compute.manager [req-9037167c-6222-479a-8ea2-cf892367fab5 req-5df5a12f-968e-4275-ba1f-37f762971419 service nova] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Received event network-vif-deleted-36d99cd3-daa8-4da2-b43f-85af2aaa66db {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 645.157904] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705918} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.159365] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 645.159580] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 645.159869] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 645.159869] env[62974]: value = "task-2653934" [ 645.159869] env[62974]: _type = "Task" [ 645.159869] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.162349] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e92761de-90bf-4aa8-a06c-426ae0004ef5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.170289] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219e73a-cc68-1360-398d-3be588b763f1, 'name': SearchDatastore_Task, 'duration_secs': 0.045447} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.170841] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.171100] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.171603] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46597d0c-e26f-4501-8866-3b4d80d6f3bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.177395] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.177671] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 645.177671] env[62974]: value = "task-2653935" [ 645.177671] env[62974]: _type = "Task" [ 645.177671] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.185553] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 645.185553] env[62974]: value = "task-2653936" [ 645.185553] env[62974]: _type = "Task" [ 645.185553] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.188706] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653935, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.196292] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.482971] env[62974]: INFO nova.compute.manager [-] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Took 1.29 seconds to deallocate network for instance. 
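The wait_for_task entries (api.py:397), the "progress is N%" polls (api.py:434) and the "completed successfully" records (api.py:444) above are one poll cycle repeated per vCenter task. As a rough illustration only, the loop below mirrors that cycle in plain Python; TaskFailed, fetch_task_info and the 0.5 s interval are assumptions for the sketch, not oslo.vmware's actual implementation.

```python
import logging
import time

LOG = logging.getLogger(__name__)


class TaskFailed(Exception):
    """Raised when a polled task ends in an error state (sketch only)."""


def wait_for_task_sketch(task_ref, fetch_task_info, poll_interval=0.5):
    """Poll a task until it completes, mirroring the log pattern above.

    fetch_task_info(task_ref) is a caller-supplied stand-in for the vCenter
    PropertyCollector read; it is assumed to return a dict with 'name',
    'state' ('running' / 'success' / 'error'), 'progress' and 'error' keys.
    """
    while True:
        info = fetch_task_info(task_ref)
        if info['state'] == 'success':
            # Matches the "... completed successfully." records (api.py:444).
            LOG.debug("Task: {'id': %s, 'name': %s} completed successfully.",
                      task_ref, info['name'])
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error'))
        # Matches the "progress is N%." records logged on every poll (api.py:434).
        LOG.debug("Task: {'id': %s, 'name': %s} progress is %s%%.",
                  task_ref, info['name'], info.get('progress', 0))
        time.sleep(poll_interval)
```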
[ 645.581942] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23f977e-40f0-475d-b877-adfffeeb0ed9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.593312] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73715a0e-7a3c-400d-9c01-984e6afcf8eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.602918] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653933, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.628306] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2812c6a5-0745-4069-a448-dbc0f1cba525 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.636446] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806eeecf-a2cd-42eb-9b4b-1c67b3a6bb63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.651551] env[62974]: DEBUG nova.compute.provider_tree [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.674711] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.691413] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065323} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.694655] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.695805] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b873ef6-1655-4e1b-833d-32952cc2fc49 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.718074] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.720918] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef78f878-1224-4bbf-a44c-22b7993081c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.735121] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653936, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.740664] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 645.740664] env[62974]: value = "task-2653937" [ 645.740664] env[62974]: _type = "Task" [ 645.740664] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.748753] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.994406] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.099070] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653933, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.156151] env[62974]: DEBUG nova.scheduler.client.report [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.173516] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653934, 'name': ReconfigVM_Task, 'duration_secs': 0.626178} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.174619] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245/605b1e4c-9bd7-41cd-b5fe-05dd5d7af245.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.175111] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb62ebc2-7cd5-4376-bb78-5424f167188a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.182081] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 646.182081] env[62974]: value = "task-2653938" [ 646.182081] env[62974]: _type = "Task" [ 646.182081] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.191288] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653938, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.200436] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653936, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572405} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.200661] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.200863] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.201109] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b787b349-de58-403e-981c-e0934a21ff85 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.206998] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 646.206998] env[62974]: value = "task-2653939" [ 646.206998] env[62974]: _type = "Task" [ 646.206998] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.216564] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.250711] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653937, 'name': ReconfigVM_Task, 'duration_secs': 0.266282} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.250992] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf/41f20cb7-c9f9-4201-ae16-4f977dae26cf.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.251591] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f6d70bf-7e41-4c92-a2fb-25f5e72d877c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.257991] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 646.257991] env[62974]: value = "task-2653940" [ 646.257991] env[62974]: _type = "Task" [ 646.257991] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.266140] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653940, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.600112] env[62974]: DEBUG oslo_vmware.api [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653933, 'name': PowerOnVM_Task, 'duration_secs': 1.132414} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.600394] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 646.600597] env[62974]: INFO nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Took 5.88 seconds to spawn the instance on the hypervisor. 
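Taken together, the records above for instances 605b1e4c-..., 41f20cb7-... and 6e81e765-... walk the same spawn pipeline: copy the cached image vmdk, extend the root disk, reconfigure the VM to attach it, rename, power on, then check the power state. The sketch below only captures that ordering; every `session` helper is a hypothetical placeholder assumed to block until its vCenter task finishes (for example via a polling loop like the earlier sketch), not nova's real vmops/vm_util code.

```python
def spawn_from_cached_image(session, instance, cached_vmdk, instance_vmdk,
                            root_size_kb):
    """Ordering sketch only; `session` and its methods are hypothetical."""
    # CopyVirtualDisk_Task:
    #   [datastore2] devstack-image-cache_base/<image>.vmdk
    #     -> [datastore2] <uuid>/<uuid>.vmdk
    session.copy_virtual_disk(cached_vmdk, instance_vmdk)

    # ExtendVirtualDisk_Task: "Extending root virtual disk to 1048576"
    # (1048576 KiB here is consistent with a 1 GiB root disk).
    session.extend_virtual_disk(instance_vmdk, root_size_kb)

    # ReconfigVM_Task: "Reconfiguring VM instance ... to attach disk ...
    # with type sparse".
    session.attach_disk_to_vm(instance, instance_vmdk, disk_type='sparse')

    # Rename_Task, then PowerOnVM_Task, then the "Checking state" read.
    session.rename_vm(instance)
    session.power_on_vm(instance)
    return session.get_power_state(instance)
```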
[ 646.600804] env[62974]: DEBUG nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.601521] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88879cce-1a2f-4053-aa5c-2838aa1385bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.660025] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.662950] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.334s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.663205] env[62974]: DEBUG nova.objects.instance [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lazy-loading 'resources' on Instance uuid 6dc914e9-bce5-4a19-a919-ae94981ea800 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 646.677924] env[62974]: INFO nova.scheduler.client.report [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Deleted allocations for instance ecde0e49-c344-4003-b858-8312c1ac344f [ 646.691760] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653938, 'name': Rename_Task, 'duration_secs': 0.148293} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.692069] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 646.692297] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8d99caf-594f-4b58-938a-53e25b583703 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.698790] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 646.698790] env[62974]: value = "task-2653941" [ 646.698790] env[62974]: _type = "Task" [ 646.698790] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.706854] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653941, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.715286] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062111} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.715527] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.716302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6ca388-40f3-4605-8e88-66306da5e5fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.737239] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.738300] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7abf4cfb-e761-49c6-9e4e-98041a96294b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.757383] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 646.757383] env[62974]: value = "task-2653942" [ 646.757383] env[62974]: _type = "Task" [ 646.757383] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.769116] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653940, 'name': Rename_Task, 'duration_secs': 0.140436} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.772063] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 646.772294] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653942, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.772493] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e031588a-9578-41c3-9eea-71a9d67ca465 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.777114] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Waiting for the task: (returnval){ [ 646.777114] env[62974]: value = "task-2653943" [ 646.777114] env[62974]: _type = "Task" [ 646.777114] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.785296] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653943, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.121175] env[62974]: INFO nova.compute.manager [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Took 39.95 seconds to build instance. [ 647.185649] env[62974]: DEBUG oslo_concurrency.lockutils [None req-26e63555-224a-46d0-b285-2707c8bb7c6a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "ecde0e49-c344-4003-b858-8312c1ac344f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.426s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.208570] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653941, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.266661] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653942, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.287824] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653943, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.423384] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.423667] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.423900] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.424126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.424315] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.426931] env[62974]: INFO nova.compute.manager [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Terminating instance [ 647.606292] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceaac83-d820-4460-b6af-19364c50ad89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.616067] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4c2278-82a9-4395-80a1-45fb38e01ca9 {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.646597] env[62974]: DEBUG oslo_concurrency.lockutils [None req-126cf0fc-4acf-45b8-ab14-bc22e9f82a2a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "5bc466fb-eebb-40b1-ba09-614a25782ecd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.546s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.651871] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3e630f-b32a-44a2-9d34-398fb0b3095f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.661643] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b061b6-b4c7-41f3-9731-124fe2bffd1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.678219] env[62974]: DEBUG nova.compute.provider_tree [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.709143] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653941, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.769332] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653942, 'name': ReconfigVM_Task, 'duration_secs': 0.971862} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.769628] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 647.770339] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa364fd3-a145-4246-8d5d-229c67597471 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.777673] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 647.777673] env[62974]: value = "task-2653944" [ 647.777673] env[62974]: _type = "Task" [ 647.777673] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.791482] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653944, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.791715] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653943, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.936421] env[62974]: DEBUG nova.compute.manager [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 647.936640] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 647.937606] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fed7af4-b8d7-4848-a19d-9bbe82d1782b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.945655] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 647.945978] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1443bc68-3e28-4718-9831-3ff44f3f3c96 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.952228] env[62974]: DEBUG oslo_vmware.api [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 647.952228] env[62974]: value = "task-2653945" [ 647.952228] env[62974]: _type = "Task" [ 647.952228] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.960690] env[62974]: DEBUG oslo_vmware.api [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653945, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.155845] env[62974]: DEBUG nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 648.180943] env[62974]: DEBUG nova.scheduler.client.report [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.212475] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653941, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.290064] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653944, 'name': Rename_Task, 'duration_secs': 0.135122} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.293014] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 648.293320] env[62974]: DEBUG oslo_vmware.api [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Task: {'id': task-2653943, 'name': PowerOnVM_Task, 'duration_secs': 1.109197} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.293517] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d19f69c-0fd7-4881-9a29-5163cb780032 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.294984] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.295207] env[62974]: DEBUG nova.compute.manager [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.295917] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e66cb3d-f8bb-41ee-b579-416325b7f7aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.304981] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 648.304981] env[62974]: value = "task-2653946" [ 648.304981] env[62974]: _type = "Task" [ 648.304981] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.312859] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653946, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.464028] env[62974]: DEBUG oslo_vmware.api [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653945, 'name': PowerOffVM_Task, 'duration_secs': 0.208373} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.464028] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 648.464028] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 648.464028] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b988dae-d679-4a18-9e53-5bfaaa28bb8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.529769] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 648.529993] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 648.530198] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Deleting the datastore file [datastore2] 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 648.530473] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bd169de-8851-463b-8a46-b6567af843ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.536272] env[62974]: DEBUG oslo_vmware.api [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for the task: (returnval){ [ 648.536272] env[62974]: value = "task-2653948" [ 648.536272] env[62974]: _type = "Task" [ 648.536272] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.544159] env[62974]: DEBUG oslo_vmware.api [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.679394] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.689486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.026s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.692158] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.297s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.694033] env[62974]: INFO nova.compute.claims [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.712173] env[62974]: DEBUG oslo_vmware.api [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653941, 'name': PowerOnVM_Task, 'duration_secs': 1.739147} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.713196] env[62974]: INFO nova.scheduler.client.report [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted allocations for instance 6dc914e9-bce5-4a19-a919-ae94981ea800 [ 648.714369] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.714611] env[62974]: INFO nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Took 10.73 seconds to spawn the instance on the hypervisor. 
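The "Acquiring lock ... by ...", "acquired ... waited N.NNNs" and "released ... held N.NNNs" triples throughout this section come from the wrapper that oslo.concurrency's synchronized decorator puts around nova's per-instance build and terminate sections (the `inner` frame at lockutils.py:402/407/421). Below is a minimal sketch of that pattern, assuming an in-process (non-external) lock keyed on the instance UUID; nova reaches lockutils through its own helper rather than calling it directly like this.

```python
from oslo_concurrency import lockutils


def build_and_run_instance(instance_uuid):
    # The decorator's wrapper is the "inner" frame that emits the
    # acquire / waited / held debug lines when oslo debug logging is enabled.
    @lockutils.synchronized(instance_uuid)
    def _locked_do_build_and_run_instance():
        # Placeholder for the real build work.
        print('building %s' % instance_uuid)

    _locked_do_build_and_run_instance()


if __name__ == '__main__':
    build_and_run_instance('0c2642d5-85fe-4db5-9891-025c88ca8c7c')
```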
[ 648.714837] env[62974]: DEBUG nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.718496] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd766ec-17e1-4319-9d13-042856d9f9fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.815137] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.820877] env[62974]: DEBUG oslo_vmware.api [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653946, 'name': PowerOnVM_Task, 'duration_secs': 0.464806} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.821151] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.821355] env[62974]: INFO nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Took 5.51 seconds to spawn the instance on the hypervisor. [ 648.821525] env[62974]: DEBUG nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.822319] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8a09fc-1a12-4f7e-87d4-4e1e65894f08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.046603] env[62974]: DEBUG oslo_vmware.api [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Task: {'id': task-2653948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162649} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.046892] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 649.047140] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 649.047336] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 649.047511] env[62974]: INFO nova.compute.manager [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 649.047753] env[62974]: DEBUG oslo.service.loopingcall [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.047943] env[62974]: DEBUG nova.compute.manager [-] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 649.048093] env[62974]: DEBUG nova.network.neutron [-] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 649.227781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-567918e5-be85-4ac7-bf64-4e3ef780f3eb tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "6dc914e9-bce5-4a19-a919-ae94981ea800" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 24.875s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.242380] env[62974]: INFO nova.compute.manager [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Took 43.38 seconds to build instance. [ 649.340149] env[62974]: INFO nova.compute.manager [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Took 33.87 seconds to build instance.
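The PowerOnVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries above all follow oslo.vmware's task protocol: invoke a *_Task SOAP method, then poll the returned task until it reports success (wait_for_task/_poll_task in oslo_vmware/api.py). The following is a rough, self-contained sketch of that flow; the vCenter endpoint, credentials, retry/poll values and the 'vm-12345' managed object ID are placeholders and do not come from this log, and error handling is omitted.

# Sketch only (placeholder endpoint/credentials/moref): drive one vSphere task
# through oslo.vmware and block until it completes, as the log above does.
from oslo_vmware import api
from oslo_vmware import vim_util

# Positional args: host, username, password, API retry count, task poll interval (s).
session = api.VMwareAPISession('vc.example.test', 'administrator', 'secret', 3, 0.5)

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical VM moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() emits the "Waiting for the task: (returnval){...} to complete.",
# the periodic "progress is N%." lines and the final "completed successfully." record.
session.wait_for_task(task)
print('power-on task finished')

The interleaved "Invoking ... with opID=oslo.vmware-..." DEBUG lines are the individual SOAP requests issued through session.vim while such tasks are created and polled.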
[ 649.478319] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.478614] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.478824] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.479017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.479190] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.483872] env[62974]: INFO nova.compute.manager [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Terminating instance [ 649.564637] env[62974]: DEBUG nova.compute.manager [req-c7db9d1e-1ca8-48c9-8148-929b11c79917 req-10a9fdda-e56e-49da-9c51-3fa874603cec service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Received event network-vif-deleted-ecdf9d70-caf4-4804-80f1-953a7ce00868 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 649.564713] env[62974]: INFO nova.compute.manager [req-c7db9d1e-1ca8-48c9-8148-929b11c79917 req-10a9fdda-e56e-49da-9c51-3fa874603cec service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Neutron deleted interface ecdf9d70-caf4-4804-80f1-953a7ce00868; detaching it from the instance and deleting it from the info cache [ 649.564936] env[62974]: DEBUG nova.network.neutron [req-c7db9d1e-1ca8-48c9-8148-929b11c79917 req-10a9fdda-e56e-49da-9c51-3fa874603cec service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info
/opt/stack/nova/nova/network/neutron.py:116}} [ 649.720520] env[62974]: INFO nova.compute.manager [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Rebuilding instance [ 649.745022] env[62974]: DEBUG oslo_concurrency.lockutils [None req-50a108d6-9156-4ac3-bb2e-001564b14576 tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 54.707s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.758014] env[62974]: DEBUG nova.compute.manager [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 649.759516] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee841bdf-365b-409c-ae56-954f9ea44f57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.811982] env[62974]: DEBUG nova.network.neutron [-] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.841597] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0967a59-7690-4bc6-8d7f-16d239154169 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "6e81e765-4fe3-42a7-a0ba-9860be897a70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 53.831s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.988877] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "refresh_cache-41f20cb7-c9f9-4201-ae16-4f977dae26cf" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.989291] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquired lock "refresh_cache-41f20cb7-c9f9-4201-ae16-4f977dae26cf" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.989291] env[62974]: DEBUG nova.network.neutron [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.067198] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f4e33ce-4478-4a95-855e-8fcb6e5fe0cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.084780] env[62974]: DEBUG
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae88f2ea-d2c1-4e50-8024-d01c008207a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.141333] env[62974]: DEBUG nova.compute.manager [req-c7db9d1e-1ca8-48c9-8148-929b11c79917 req-10a9fdda-e56e-49da-9c51-3fa874603cec service nova] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Detach interface failed, port_id=ecdf9d70-caf4-4804-80f1-953a7ce00868, reason: Instance 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 650.198989] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23146f7f-c1fe-4882-9039-188f62443872 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.209051] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147225b3-8d41-4d5c-b838-0c593ddef876 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.256037] env[62974]: DEBUG nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 650.262199] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf7f829-83a6-4037-8f6c-417d1e86845b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.279120] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b049fea-abda-41c6-a709-5b5c7057651a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.306978] env[62974]: DEBUG nova.compute.provider_tree [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.314460] env[62974]: INFO nova.compute.manager [-] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Took 1.27 seconds to deallocate network for instance. [ 650.347332] env[62974]: DEBUG nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 650.518288] env[62974]: DEBUG nova.network.neutron [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.604540] env[62974]: DEBUG nova.network.neutron [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.771884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.771884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.771884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.771884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.772327] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.773093] env[62974]: INFO nova.compute.manager [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Terminating instance [ 650.777301] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.777747] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.777988] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d4d6132-bb00-4c12-b41d-00df9ff373e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.785183] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 650.785183] env[62974]: value = "task-2653949" [ 650.785183] env[62974]: _type = "Task" [ 650.785183] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.794305] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653949, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.809878] env[62974]: DEBUG nova.scheduler.client.report [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.822159] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.870588] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.107671] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Releasing lock "refresh_cache-41f20cb7-c9f9-4201-ae16-4f977dae26cf" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.108141] env[62974]: DEBUG nova.compute.manager [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d 
tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 651.108340] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.109223] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3696da6-a349-4cff-8735-223511ae0bd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.116856] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 651.117132] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8436cf7c-490b-402c-8463-e3ba5cb299f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.122686] env[62974]: DEBUG oslo_vmware.api [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 651.122686] env[62974]: value = "task-2653950" [ 651.122686] env[62974]: _type = "Task" [ 651.122686] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.130374] env[62974]: DEBUG oslo_vmware.api [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.277591] env[62974]: DEBUG nova.compute.manager [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 651.277802] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.278740] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf7ead3-4c3d-4117-9814-cf88c23decad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.286469] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 651.290069] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bcb3512-76e1-420e-a41e-93dbf1353322 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.297149] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653949, 'name': PowerOffVM_Task, 'duration_secs': 0.208918} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.298398] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.298637] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.299013] env[62974]: DEBUG oslo_vmware.api [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 651.299013] env[62974]: value = "task-2653951" [ 651.299013] env[62974]: _type = "Task" [ 651.299013] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.299782] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca24ca36-5438-4557-89ee-76c977021c47 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.309581] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.312572] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53047d6e-6f2f-4e44-9aad-ecb92ecfc195 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.313976] env[62974]: DEBUG oslo_vmware.api [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653951, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.314675] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.315148] env[62974]: DEBUG nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 651.317628] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.815s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.319047] env[62974]: INFO nova.compute.claims [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 651.383120] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.383348] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.383530] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Deleting the datastore file [datastore2] 6e81e765-4fe3-42a7-a0ba-9860be897a70 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.383814] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f9ea1f1-a7fb-4112-817a-da71f0ab0855 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.390888] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 651.390888] env[62974]: value = "task-2653953" [ 651.390888] env[62974]: _type = "Task" [ 651.390888] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.399433] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653953, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.632366] env[62974]: DEBUG oslo_vmware.api [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653950, 'name': PowerOffVM_Task, 'duration_secs': 0.278728} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.633078] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.633078] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.633078] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ff23947-9348-4f90-a69b-03b31627765c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.656830] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.657062] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.657288] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Deleting the datastore file [datastore2] 41f20cb7-c9f9-4201-ae16-4f977dae26cf {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.657718] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d3779cd-1b45-4ef7-b677-0305283bf04f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.663425] env[62974]: DEBUG oslo_vmware.api [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for the task: (returnval){ [ 651.663425] env[62974]: value = "task-2653955" [ 651.663425] env[62974]: _type = "Task" [ 651.663425] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.671212] env[62974]: DEBUG oslo_vmware.api [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653955, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.811709] env[62974]: DEBUG oslo_vmware.api [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653951, 'name': PowerOffVM_Task, 'duration_secs': 0.197695} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.811937] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.812124] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.812374] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9577b21-8065-4fe5-a4d7-2e340fa9bcad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.823466] env[62974]: DEBUG nova.compute.utils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 651.826735] env[62974]: DEBUG nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Not allocating networking since 'none' was specified. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 651.875138] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.875414] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.875651] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Deleting the datastore file [datastore2] 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.875931] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e401ab78-6d14-468e-b9d3-de5c3ac7982a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.881724] env[62974]: DEBUG oslo_vmware.api [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for the task: (returnval){ [ 651.881724] env[62974]: value = "task-2653957" [ 651.881724] env[62974]: _type = "Task" [ 651.881724] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.891026] env[62974]: DEBUG oslo_vmware.api [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653957, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.898076] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127893} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.898310] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.898489] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.898658] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.173824] env[62974]: DEBUG oslo_vmware.api [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Task: {'id': task-2653955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085974} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.174095] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.174283] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 652.174456] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.174629] env[62974]: INFO nova.compute.manager [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Took 1.07 seconds to destroy the instance on the hypervisor. [ 652.174867] env[62974]: DEBUG oslo.service.loopingcall [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.175117] env[62974]: DEBUG nova.compute.manager [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 652.175220] env[62974]: DEBUG nova.network.neutron [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.190170] env[62974]: DEBUG nova.network.neutron [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.327547] env[62974]: DEBUG nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 652.393576] env[62974]: DEBUG oslo_vmware.api [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Task: {'id': task-2653957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277551} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.393897] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.394139] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 652.394393] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.394556] env[62974]: INFO nova.compute.manager [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Took 1.12 seconds to destroy the instance on the hypervisor. [ 652.394820] env[62974]: DEBUG oslo.service.loopingcall [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.397159] env[62974]: DEBUG nova.compute.manager [-] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 652.397268] env[62974]: DEBUG nova.network.neutron [-] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.678948] env[62974]: DEBUG nova.compute.manager [req-fbaf0399-64da-4dd4-a0e0-34fdc34c662a req-e13eca3a-4ece-4903-8422-95b9eeb91a42 service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Received event network-vif-deleted-2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 652.678948] env[62974]: INFO nova.compute.manager [req-fbaf0399-64da-4dd4-a0e0-34fdc34c662a req-e13eca3a-4ece-4903-8422-95b9eeb91a42 service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Neutron deleted interface 2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909; detaching it from the instance and deleting it from the info cache [ 652.678948] env[62974]: DEBUG nova.network.neutron [req-fbaf0399-64da-4dd4-a0e0-34fdc34c662a req-e13eca3a-4ece-4903-8422-95b9eeb91a42 service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.692935] env[62974]: DEBUG nova.network.neutron [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.779474] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bcaa3b-87db-4cf8-85ec-e6bb6645beee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.788056] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b4af01-c2f4-4425-88f1-851e948fe2b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.821264] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4695f9b7-73bf-42ce-885d-e772c3f9d0f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.829673] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda290dd-d257-42e2-add6-7bdeca2c5cf8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.848703] env[62974]: DEBUG nova.compute.provider_tree [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.931598] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.931845] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.931998] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.932193] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.932336] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.932476] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.932681] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.932835] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.932994] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.933164] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 
tempest-ServerShowV257Test-520911113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.933360] env[62974]: DEBUG nova.virt.hardware [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.934208] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8cbdd9-9c65-4613-85df-1d0a84ea82e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.942426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b3e3fe-2ddd-4b64-bf76-75eace34e273 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.957453] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.962998] env[62974]: DEBUG oslo.service.loopingcall [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.963306] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.963533] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98854f22-5914-46f9-8462-3c1d541596cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.980541] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.980541] env[62974]: value = "task-2653958" [ 652.980541] env[62974]: _type = "Task" [ 652.980541] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.988955] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653958, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.146667] env[62974]: DEBUG nova.network.neutron [-] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.180372] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62073dca-8010-4fd2-a483-a8e48a8e97f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.191130] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3723163-b0a7-4c22-a8e9-4008f3d28f76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.203380] env[62974]: INFO nova.compute.manager [-] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Took 1.03 seconds to deallocate network for instance. [ 653.227021] env[62974]: DEBUG nova.compute.manager [req-fbaf0399-64da-4dd4-a0e0-34fdc34c662a req-e13eca3a-4ece-4903-8422-95b9eeb91a42 service nova] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Detach interface failed, port_id=2a54c79e-87b6-4b3a-86ed-2bc9eb6e1909, reason: Instance 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 653.339446] env[62974]: DEBUG nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 653.351403] env[62974]: DEBUG nova.scheduler.client.report [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.362132] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 653.362372] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.362528] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.362702] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.362844] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.362985] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 653.363199] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 653.363351] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 653.363507] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 653.363913] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 653.364304] env[62974]: DEBUG nova.virt.hardware [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 
tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 653.365729] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dbb645-e06c-43fe-9afb-a3336501d238 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.377466] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7f74ab-1075-49d0-8f39-d4202a1b578c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.390814] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.396221] env[62974]: DEBUG oslo.service.loopingcall [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.396435] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.396627] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1541e3fc-aa26-45db-aeed-f800a60d47d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.412149] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.412149] env[62974]: value = "task-2653959" [ 653.412149] env[62974]: _type = "Task" [ 653.412149] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.419343] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653959, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.490352] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653958, 'name': CreateVM_Task, 'duration_secs': 0.281179} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.490536] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.490952] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.491147] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.491492] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.491744] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f87f912-d7a1-44a1-ba0f-ae4f95dd299c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.496452] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 653.496452] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523bedb5-96f2-d98f-d83e-cb02ae81b59c" [ 653.496452] env[62974]: _type = "Task" [ 653.496452] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.504734] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523bedb5-96f2-d98f-d83e-cb02ae81b59c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.649604] env[62974]: INFO nova.compute.manager [-] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Took 1.25 seconds to deallocate network for instance. 
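(Annotation, not part of the captured log.) The CreateVM_Task and SearchDatastore_Task records above follow oslo.vmware's invoke-then-poll pattern: a SOAP method is invoked against a managed object, a task reference comes back, and the session polls it until completion, which is what produces the repeated "Task: {'id': ..., 'name': CreateVM_Task} progress is N%" lines. The sketch below illustrates that pattern with the public oslo.vmware API only; it is not Nova's vm_util code, and the connection details and managed-object references (vm_folder_ref, resource_pool_ref, config_spec) are hypothetical placeholders rather than values taken from this log.

    # Illustrative sketch of the invoke-and-poll pattern visible in the
    # records above; assumes a reachable vCenter and caller-supplied morefs.
    from oslo_vmware import api as vmware_api

    def create_vm_and_wait(session, vm_folder_ref, resource_pool_ref, config_spec):
        # invoke_api() issues the SOAP call (logged above as
        # "Invoking Folder.CreateVM_Task with opID=...") and returns a task moref.
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                      vm_folder_ref, config=config_spec,
                                      pool=resource_pool_ref)
        # wait_for_task() polls the task (the "progress is N%" DEBUG lines)
        # and returns the task info once the task reports success.
        return session.wait_for_task(task_ref)

    if __name__ == '__main__':
        # Placeholder credentials; a real vCenter endpoint is required to run this.
        session = vmware_api.VMwareAPISession('vcenter.example.org', 'user',
                                              'secret', api_retry_count=3,
                                              task_poll_interval=0.5)
        # The folder, resource pool and config spec would normally be looked up
        # via PropertyCollector calls before invoking create_vm_and_wait().
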
[ 653.711425] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.857804] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.858313] env[62974]: DEBUG nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 653.861259] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.242s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.861469] env[62974]: DEBUG nova.objects.instance [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lazy-loading 'resources' on Instance uuid 7f0d367d-9d60-414b-990e-56a2b43fd963 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 653.922691] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653959, 'name': CreateVM_Task, 'duration_secs': 0.256699} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.922865] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.923293] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.009830] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523bedb5-96f2-d98f-d83e-cb02ae81b59c, 'name': SearchDatastore_Task, 'duration_secs': 0.008426} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.010167] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.010446] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.010692] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.010887] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.011152] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.011845] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.012036] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 654.012359] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fc172b3-9214-449b-9545-a890567c0352 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.014772] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1bce9b2-9366-4c1f-ac32-d7c4b6f375c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.021754] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 
tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 654.021754] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5271a3e5-0e22-90b3-7b24-17f810c60519" [ 654.021754] env[62974]: _type = "Task" [ 654.021754] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.026483] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.026483] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.027513] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa20a4ea-2ca9-46ef-b230-a6e7eaab6d5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.035599] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5271a3e5-0e22-90b3-7b24-17f810c60519, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.038644] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 654.038644] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d1b6d2-2b25-c502-3e8c-e3f225e3de1c" [ 654.038644] env[62974]: _type = "Task" [ 654.038644] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.047486] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d1b6d2-2b25-c502-3e8c-e3f225e3de1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.156423] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.368154] env[62974]: DEBUG nova.compute.utils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 654.370376] env[62974]: DEBUG nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 654.370583] env[62974]: DEBUG nova.network.neutron [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 654.408503] env[62974]: DEBUG nova.policy [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e0f9b6a0f9f4f4e992381105c69dfbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57e631c2e78a4391bceb20072992f8bd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 654.532016] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5271a3e5-0e22-90b3-7b24-17f810c60519, 'name': SearchDatastore_Task, 'duration_secs': 0.016916} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.534367] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.534592] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.534807] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.549271] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d1b6d2-2b25-c502-3e8c-e3f225e3de1c, 'name': SearchDatastore_Task, 'duration_secs': 0.008323} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.550045] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35b3fea7-08dc-472d-97dd-92e27b901ac2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.555406] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 654.555406] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd8194-d74a-021a-9bf8-45ee6493cf66" [ 654.555406] env[62974]: _type = "Task" [ 654.555406] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.565224] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd8194-d74a-021a-9bf8-45ee6493cf66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.664995] env[62974]: DEBUG nova.network.neutron [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Successfully created port: 07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.819868] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d371175-427f-4912-9977-c8268cbd3b54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.828101] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a7ebfc-1ebd-4393-ab47-b839d9a5c53f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.862240] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1e4f71-e805-49b9-9a8c-e6e7d15ed90f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.871521] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71e0b6c-e2d4-4259-9558-5e59ee0e84fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.875931] env[62974]: DEBUG nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.888353] env[62974]: DEBUG nova.compute.provider_tree [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.066951] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd8194-d74a-021a-9bf8-45ee6493cf66, 'name': SearchDatastore_Task, 'duration_secs': 0.024777} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.067320] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.067514] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.067789] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.067985] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.068226] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bf5621b-e693-46d7-baf6-4de4861f2292 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.070204] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7db3d7d9-215f-4d9a-92d5-ab572d314644 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.079968] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 655.079968] env[62974]: value = "task-2653960" [ 655.079968] env[62974]: _type = "Task" [ 655.079968] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.079968] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.079968] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 655.082743] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c3a8882-eeca-472d-9e40-74f2b2ab417b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.087745] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 655.087745] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526c5df1-5fb9-160f-ee73-ccbb31447fde" [ 655.087745] env[62974]: _type = "Task" [ 655.087745] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.090822] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653960, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.098952] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526c5df1-5fb9-160f-ee73-ccbb31447fde, 'name': SearchDatastore_Task, 'duration_secs': 0.008777} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.099706] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8b36ae-5b6f-42ab-a460-a706e16288a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.104332] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 655.104332] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af6708-a6b3-45a0-7565-e68f099891da" [ 655.104332] env[62974]: _type = "Task" [ 655.104332] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.111426] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af6708-a6b3-45a0-7565-e68f099891da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.392553] env[62974]: DEBUG nova.scheduler.client.report [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.589090] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653960, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458493} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.589476] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.589751] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.590052] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ecaa823-7c58-4331-bd5a-d18923c77da0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.596022] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 655.596022] env[62974]: value = "task-2653961" [ 655.596022] env[62974]: _type = "Task" [ 655.596022] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.603849] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653961, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.612366] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af6708-a6b3-45a0-7565-e68f099891da, 'name': SearchDatastore_Task, 'duration_secs': 0.007905} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.612692] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.612985] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.613280] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48c45fb9-26c1-4662-aa0a-f9e511d8170a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.619600] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 655.619600] env[62974]: value = "task-2653962" [ 655.619600] env[62974]: _type = "Task" [ 655.619600] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.626601] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.888192] env[62974]: DEBUG nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 655.898027] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.900598] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.497s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.901730] env[62974]: DEBUG nova.objects.instance [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lazy-loading 'resources' on Instance uuid 586a3541-060f-4859-8507-17faa637b17e {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 655.917389] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.918216] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.918575] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.919019] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.919261] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Image pref 0:0:0 
{{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.919434] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.920482] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.920482] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 655.920482] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.920482] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.920482] env[62974]: DEBUG nova.virt.hardware [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.921642] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543c576f-e5e6-42d5-9781-8b0fba5a49c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.925140] env[62974]: INFO nova.scheduler.client.report [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Deleted allocations for instance 7f0d367d-9d60-414b-990e-56a2b43fd963 [ 655.935833] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e675f283-f1ec-4b42-baf0-2036fc522cf3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.094843] env[62974]: DEBUG nova.compute.manager [req-dc11217d-7e98-4e2c-93f6-af2d584559c8 req-ec673ad3-112c-440f-815f-f4fcd61befeb service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received event network-vif-plugged-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 656.094843] env[62974]: DEBUG oslo_concurrency.lockutils [req-dc11217d-7e98-4e2c-93f6-af2d584559c8 
req-ec673ad3-112c-440f-815f-f4fcd61befeb service nova] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.097118] env[62974]: DEBUG oslo_concurrency.lockutils [req-dc11217d-7e98-4e2c-93f6-af2d584559c8 req-ec673ad3-112c-440f-815f-f4fcd61befeb service nova] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.097563] env[62974]: DEBUG oslo_concurrency.lockutils [req-dc11217d-7e98-4e2c-93f6-af2d584559c8 req-ec673ad3-112c-440f-815f-f4fcd61befeb service nova] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.097938] env[62974]: DEBUG nova.compute.manager [req-dc11217d-7e98-4e2c-93f6-af2d584559c8 req-ec673ad3-112c-440f-815f-f4fcd61befeb service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] No waiting events found dispatching network-vif-plugged-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.098228] env[62974]: WARNING nova.compute.manager [req-dc11217d-7e98-4e2c-93f6-af2d584559c8 req-ec673ad3-112c-440f-815f-f4fcd61befeb service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received unexpected event network-vif-plugged-07b0aa8b-b38d-489b-9998-6efe6126083f for instance with vm_state building and task_state spawning. [ 656.109632] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653961, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063367} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.110060] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.110920] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ec1a52-c601-4d54-aee4-9c391d5ee8ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.133484] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.138047] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d82ec776-41b8-48ff-bf71-dddd8143379f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.159107] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448881} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.160211] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.160211] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.160211] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 656.160211] env[62974]: value = "task-2653963" [ 656.160211] env[62974]: _type = "Task" [ 656.160211] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.160211] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad0a2831-aa55-45c9-b252-32f8dab62371 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.169970] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653963, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.171210] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 656.171210] env[62974]: value = "task-2653964" [ 656.171210] env[62974]: _type = "Task" [ 656.171210] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.180226] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.181068] env[62974]: DEBUG nova.network.neutron [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Successfully updated port: 07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 656.432909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eaf091e-db11-4420-a171-7a8bd487c09f tempest-ImagesOneServerTestJSON-958584175 tempest-ImagesOneServerTestJSON-958584175-project-member] Lock "7f0d367d-9d60-414b-990e-56a2b43fd963" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.552s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.671555] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.681665] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.327654} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.681893] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.682658] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0188a61b-491f-4da1-b6ae-981f39a37476 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.687554] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.687803] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.687848] env[62974]: DEBUG nova.network.neutron [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 656.706086] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.709460] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3de4d052-9e53-43f4-91b3-86dcb8baa935 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.729046] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 656.729046] env[62974]: value = "task-2653965" [ 656.729046] env[62974]: _type = "Task" [ 656.729046] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.737573] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653965, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.868931] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef02e72c-008b-46ff-aa4b-6100eaf5cdea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.880356] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e91977b-d8cd-4081-8e1d-a18e59f02b7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.915466] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174ae8b1-7473-4980-abeb-830354f4bccc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.923323] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e9a569-1f6d-486e-b292-3e689d57f07f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.937373] env[62974]: DEBUG nova.compute.provider_tree [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.173283] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653963, 'name': ReconfigVM_Task, 'duration_secs': 0.722744} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.173562] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 6e81e765-4fe3-42a7-a0ba-9860be897a70/6e81e765-4fe3-42a7-a0ba-9860be897a70.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.174193] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91b34421-7912-437a-b17e-d0ec241ebb36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.180621] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 657.180621] env[62974]: value = "task-2653966" [ 657.180621] env[62974]: _type = "Task" [ 657.180621] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.188419] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653966, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.220769] env[62974]: DEBUG nova.network.neutron [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.243094] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653965, 'name': ReconfigVM_Task, 'duration_secs': 0.304698} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.245554] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.246289] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bd0538e-d7fe-43b4-a0c8-e1bd2a5ea1c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.253627] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 657.253627] env[62974]: value = "task-2653967" [ 657.253627] env[62974]: _type = "Task" [ 657.253627] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.261791] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653967, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.377343] env[62974]: DEBUG nova.network.neutron [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": "07b0aa8b-b38d-489b-9998-6efe6126083f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.441090] env[62974]: DEBUG nova.scheduler.client.report [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.697643] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653966, 'name': Rename_Task, 'duration_secs': 0.158076} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.697920] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.701352] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d6f8b6a-471f-431f-bf5f-93f679879124 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.707934] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 657.707934] env[62974]: value = "task-2653968" [ 657.707934] env[62974]: _type = "Task" [ 657.707934] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.716410] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653968, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.762779] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653967, 'name': Rename_Task, 'duration_secs': 0.151807} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.763078] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.763320] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36baaca5-d8c9-48b4-ad12-6b7dc66ab2c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.770302] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 657.770302] env[62974]: value = "task-2653969" [ 657.770302] env[62974]: _type = "Task" [ 657.770302] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.779524] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653969, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.880551] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.880800] env[62974]: DEBUG nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Instance network_info: |[{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": "07b0aa8b-b38d-489b-9998-6efe6126083f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 657.881284] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:15:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f54f7284-8f7d-47ee-839d-2143062cfe44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07b0aa8b-b38d-489b-9998-6efe6126083f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 657.889518] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating folder: Project (57e631c2e78a4391bceb20072992f8bd). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.889810] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a57fd519-cc9d-4f45-a1e7-3d62c180b812 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.901401] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Created folder: Project (57e631c2e78a4391bceb20072992f8bd) in parent group-v535199. [ 657.901612] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating folder: Instances. Parent ref: group-v535298. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.901876] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b205cb2-7684-403d-ac51-bad80099263b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.910811] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Created folder: Instances in parent group-v535298. [ 657.911088] env[62974]: DEBUG oslo.service.loopingcall [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 657.911299] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 657.911528] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95f32140-fc4c-45b9-a6ff-368d049b551b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.932534] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 657.932534] env[62974]: value = "task-2653972" [ 657.932534] env[62974]: _type = "Task" [ 657.932534] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.940203] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653972, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.946128] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.948314] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.002s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.952842] env[62974]: INFO nova.compute.claims [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.982686] env[62974]: INFO nova.scheduler.client.report [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Deleted allocations for instance 586a3541-060f-4859-8507-17faa637b17e [ 658.123481] env[62974]: DEBUG nova.compute.manager [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received event network-changed-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 658.123807] env[62974]: DEBUG nova.compute.manager [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Refreshing instance network info cache due to event network-changed-07b0aa8b-b38d-489b-9998-6efe6126083f. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 658.124109] env[62974]: DEBUG oslo_concurrency.lockutils [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] Acquiring lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.124313] env[62974]: DEBUG oslo_concurrency.lockutils [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] Acquired lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.124551] env[62974]: DEBUG nova.network.neutron [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Refreshing network info cache for port 07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.217491] env[62974]: DEBUG oslo_vmware.api [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653968, 'name': PowerOnVM_Task, 'duration_secs': 0.439787} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.217740] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.217958] env[62974]: DEBUG nova.compute.manager [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.218786] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f07a706-50ad-4e08-aecc-ba073aa4db12 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.283235] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653969, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.443020] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653972, 'name': CreateVM_Task, 'duration_secs': 0.341253} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.443020] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 658.443731] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.443888] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.444448] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 658.444448] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05325ad1-b8d6-46ef-8140-a116a4714e87 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.449137] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 658.449137] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c02b2-f196-e11c-058d-10e0257219e0" [ 658.449137] env[62974]: _type = "Task" [ 658.449137] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.460457] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c02b2-f196-e11c-058d-10e0257219e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.490325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8716add-f41f-4d4b-ae7e-f86dac7d48f8 tempest-ServerRescueTestJSONUnderV235-216947283 tempest-ServerRescueTestJSONUnderV235-216947283-project-member] Lock "586a3541-060f-4859-8507-17faa637b17e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.033s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.740680] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.780835] env[62974]: DEBUG oslo_vmware.api [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653969, 'name': PowerOnVM_Task, 'duration_secs': 0.589892} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.781124] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.781328] env[62974]: INFO nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Took 5.44 seconds to spawn the instance on the hypervisor. [ 658.781507] env[62974]: DEBUG nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.782281] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621d501c-e269-4835-a07b-2a63aed1d444 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.913155] env[62974]: DEBUG nova.network.neutron [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updated VIF entry in instance network info cache for port 07b0aa8b-b38d-489b-9998-6efe6126083f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 658.914145] env[62974]: DEBUG nova.network.neutron [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": "07b0aa8b-b38d-489b-9998-6efe6126083f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.960114] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c02b2-f196-e11c-058d-10e0257219e0, 'name': SearchDatastore_Task, 'duration_secs': 0.011796} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.960406] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.960608] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 658.960647] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.960767] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.960932] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 658.964059] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f114ecb-46f5-4a8d-a8db-79d8b10c011d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.973610] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.973793] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 658.974599] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60253835-67f1-43c6-a07a-2dd5c75a0024 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.979852] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 658.979852] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af068f-f4f2-6a65-2cab-d4a5c4663667" [ 658.979852] env[62974]: _type = "Task" [ 658.979852] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.987937] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af068f-f4f2-6a65-2cab-d4a5c4663667, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.302305] env[62974]: INFO nova.compute.manager [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Took 31.93 seconds to build instance. [ 659.421635] env[62974]: DEBUG oslo_concurrency.lockutils [req-be932df6-e70f-4aa0-bafe-db8226fe5842 req-23f9fb53-1f3f-4520-bd5f-e8e0ae09c5ad service nova] Releasing lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.494158] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af068f-f4f2-6a65-2cab-d4a5c4663667, 'name': SearchDatastore_Task, 'duration_secs': 0.011875} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.497964] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fe43877-1865-44b5-85da-3186c3c88818 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.503468] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 659.503468] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ab7421-2fed-c310-72e1-4f19f1b879d3" [ 659.503468] env[62974]: _type = "Task" [ 659.503468] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.511120] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ab7421-2fed-c310-72e1-4f19f1b879d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.583199] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549c0669-a5f4-4f76-be10-96bf2fa2d366 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.590473] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d207d263-3939-455b-b001-a26532d11c32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.630599] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dad133e-fc6f-4587-8ab0-0e6b363485cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.639165] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10be98d-fd86-437f-b666-34ed2a8a895a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.653191] env[62974]: DEBUG nova.compute.provider_tree [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.777842] env[62974]: INFO nova.compute.manager [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Rebuilding instance [ 659.803609] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f653acdd-ed66-4016-9a03-3fdbe7304736 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "28c247f6-3179-425d-ae1c-615151b1e2ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.047s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.819813] env[62974]: DEBUG nova.compute.manager [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.820674] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227320b5-aedb-4947-be75-e66c7a294495 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.885749] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] 
Acquiring lock "6e81e765-4fe3-42a7-a0ba-9860be897a70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.886082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "6e81e765-4fe3-42a7-a0ba-9860be897a70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.886567] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "6e81e765-4fe3-42a7-a0ba-9860be897a70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.886567] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "6e81e765-4fe3-42a7-a0ba-9860be897a70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.886773] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "6e81e765-4fe3-42a7-a0ba-9860be897a70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.890190] env[62974]: INFO nova.compute.manager [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Terminating instance [ 660.018741] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ab7421-2fed-c310-72e1-4f19f1b879d3, 'name': SearchDatastore_Task, 'duration_secs': 0.027296} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.018741] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.018992] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 660.022101] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b8aa002-f121-4993-9739-9dc005fb9f52 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.030240] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 660.030240] env[62974]: value = "task-2653973" [ 660.030240] env[62974]: _type = "Task" [ 660.030240] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.039167] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653973, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.156819] env[62974]: DEBUG nova.scheduler.client.report [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.309381] env[62974]: DEBUG nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.396662] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "refresh_cache-6e81e765-4fe3-42a7-a0ba-9860be897a70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.396662] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquired lock "refresh_cache-6e81e765-4fe3-42a7-a0ba-9860be897a70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.396662] env[62974]: DEBUG nova.network.neutron [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 660.550553] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510322} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.550553] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 660.550553] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 660.550553] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44a5fcfc-9986-456e-92ce-bfb5192c2a29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.555329] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 660.555329] env[62974]: value = "task-2653974" [ 660.555329] env[62974]: _type = "Task" [ 660.555329] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.566533] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653974, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.662174] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.665468] env[62974]: DEBUG nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.666336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.680s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.668025] env[62974]: INFO nova.compute.claims [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.841018] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 660.841018] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf44100b-2f04-4a4b-8482-10b8ea4e5986 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.849129] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 660.849129] env[62974]: value = "task-2653975" [ 660.849129] env[62974]: _type = "Task" [ 660.849129] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.849129] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.862895] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653975, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.920568] env[62974]: DEBUG nova.network.neutron [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.991877] env[62974]: DEBUG nova.network.neutron [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.065723] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653974, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.423557} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.066464] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.067434] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b6c554-6960-4cee-9f8d-4c35d3f76452 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.096277] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.097887] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9182df0-95b8-4f5e-ae7a-e6506d706661 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.124454] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 661.124454] env[62974]: value = "task-2653976" [ 661.124454] env[62974]: _type = "Task" [ 661.124454] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.131450] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653976, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.183600] env[62974]: DEBUG nova.compute.utils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 661.185907] env[62974]: DEBUG nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Not allocating networking since 'none' was specified. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 661.358779] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653975, 'name': PowerOffVM_Task, 'duration_secs': 0.24005} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.360076] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 661.360076] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.360476] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330dc888-b25e-45ae-942d-3d6ce3f092d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.367476] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 661.367727] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f752ebc-f51d-4d67-be29-ebbc9231ae34 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.394531] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 661.394754] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 661.394933] env[62974]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleting the datastore file [datastore1] 28c247f6-3179-425d-ae1c-615151b1e2ff {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 661.395233] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7997239-1230-44f0-95fa-6662801743ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.401148] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 661.401148] env[62974]: value = "task-2653978" [ 661.401148] env[62974]: _type = "Task" [ 661.401148] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.411241] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653978, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.495456] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Releasing lock "refresh_cache-6e81e765-4fe3-42a7-a0ba-9860be897a70" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.496278] env[62974]: DEBUG nova.compute.manager [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 661.496619] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.497412] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8627fe9f-363c-488f-832d-195fe6af9610 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.506522] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.506522] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a765111b-4491-4995-874a-5b716ecc0731 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.512094] env[62974]: DEBUG oslo_vmware.api [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 661.512094] env[62974]: value = "task-2653979" [ 661.512094] env[62974]: _type = "Task" [ 661.512094] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.520330] env[62974]: DEBUG oslo_vmware.api [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653979, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.632677] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653976, 'name': ReconfigVM_Task, 'duration_secs': 0.291871} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.632966] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 661.635360] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39a95560-a2d7-4a04-a6a6-1210fe55a625 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.644107] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 661.644107] env[62974]: value = "task-2653980" [ 661.644107] env[62974]: _type = "Task" [ 661.644107] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.654210] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653980, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.690060] env[62974]: DEBUG nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 661.914791] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653978, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238696} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.915883] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 661.916180] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 661.916382] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.024336] env[62974]: DEBUG oslo_vmware.api [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653979, 'name': PowerOffVM_Task, 'duration_secs': 0.137061} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.024608] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 662.024819] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 662.025471] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65631b38-354b-4ab9-9501-a2e6acfaa280 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.049786] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 662.049996] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 662.050193] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Deleting the datastore file [datastore1] 6e81e765-4fe3-42a7-a0ba-9860be897a70 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} 
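The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: invoke a vSphere *_Task method, receive a Task reference immediately, then poll it until it completes. The sketch below is illustrative only and is not taken from this log; the vCenter host, credentials, datastore path and datacenter reference are hypothetical placeholders, and error handling is omitted.

    # Sketch of the invoke/wait pattern behind the "Waiting for the task ...",
    # "progress is N%" and "completed successfully" entries in this log.
    # Host, credentials and the datastore path are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user@example.test', 'secret',
        api_retry_count=10,       # retries on transient API faults
        task_poll_interval=0.5)   # seconds between successive task polls

    file_manager = session.vim.service_content.fileManager
    datacenter = None  # placeholder: a Datacenter moref would normally go here

    # *_Task methods return a Task moref right away; wait_for_task() polls it,
    # logging progress, and raises if the task finishes in an error state.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore1] example-instance-dir',
                              datacenter=datacenter)
    session.wait_for_task(task)

The poll loop, rather than a blocking SOAP call, is what produces the repeated "progress is N%" lines for longer operations such as CopyVirtualDisk_Task.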
[ 662.054512] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1586122-0407-442d-bba3-552ca38190fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.060977] env[62974]: DEBUG oslo_vmware.api [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for the task: (returnval){ [ 662.060977] env[62974]: value = "task-2653982" [ 662.060977] env[62974]: _type = "Task" [ 662.060977] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.069289] env[62974]: DEBUG oslo_vmware.api [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653982, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.151895] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653980, 'name': Rename_Task, 'duration_secs': 0.145553} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.154104] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.155018] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-309881ab-7526-4a4f-806f-e4820d9de067 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.160928] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 662.160928] env[62974]: value = "task-2653983" [ 662.160928] env[62974]: _type = "Task" [ 662.160928] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.175614] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653983, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.311038] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d1e63a-4b0b-4ad1-8ba4-22338d6eecc8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.317201] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670e0696-f2b2-4e7f-bfee-f8eb3d402677 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.356061] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd435fe-a51a-4232-b403-0cfd0fcd0c8c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.361850] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "3426d512-d54e-4852-8eca-8ba9f5fef418" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.362144] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.367989] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847068d8-9faf-4328-953c-6ece2edebd1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.385171] env[62974]: DEBUG nova.compute.provider_tree [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.571250] env[62974]: DEBUG oslo_vmware.api [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Task: {'id': task-2653982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087961} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.571373] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 662.571626] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 662.571709] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.571879] env[62974]: INFO nova.compute.manager [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Took 1.08 seconds to destroy the instance on the hypervisor. [ 662.572163] env[62974]: DEBUG oslo.service.loopingcall [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.572363] env[62974]: DEBUG nova.compute.manager [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 662.572458] env[62974]: DEBUG nova.network.neutron [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 662.593943] env[62974]: DEBUG nova.network.neutron [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.671637] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653983, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.705650] env[62974]: DEBUG nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 662.735848] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.737015] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.737015] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.737015] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.737015] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.737015] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.737652] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.737652] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
662.737652] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.737738] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.737860] env[62974]: DEBUG nova.virt.hardware [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.739079] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f7cea4-a2bc-47e5-b96c-a6510639f0cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.746970] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6736e133-9b2d-4242-ad1c-6a6ca20b3a0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.762726] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.769248] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Creating folder: Project (05b0c9722bf54c34819cf8d93b27af82). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.769669] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8548bfa-8032-4b6e-8485-d0b80786abda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.781515] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Created folder: Project (05b0c9722bf54c34819cf8d93b27af82) in parent group-v535199. [ 662.781694] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Creating folder: Instances. Parent ref: group-v535301. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.781972] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7aab15af-96b6-4e65-a5ae-3d0eed05073a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.791469] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Created folder: Instances in parent group-v535301. [ 662.791695] env[62974]: DEBUG oslo.service.loopingcall [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.791883] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.792604] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a31f069-6eeb-487b-8b4e-307a2bf23b93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.809441] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.809441] env[62974]: value = "task-2653986" [ 662.809441] env[62974]: _type = "Task" [ 662.809441] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.816956] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653986, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.890619] env[62974]: DEBUG nova.scheduler.client.report [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.959546] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.959857] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.960124] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.960377] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.960565] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.960786] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.961044] env[62974]: DEBUG nova.virt.hardware [None 
req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.961219] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 662.961392] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.961574] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.961792] env[62974]: DEBUG nova.virt.hardware [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.962694] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a017b457-7158-4969-989b-d18b7d6bfc2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.971703] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6373f687-c7be-48ba-a7ab-616d580ea126 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.988482] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.994350] env[62974]: DEBUG oslo.service.loopingcall [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.995953] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.998437] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26be860d-726a-4000-bb4d-e2301e358d6e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.010882] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.011176] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.016441] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.016441] env[62974]: value = "task-2653987" [ 663.016441] env[62974]: _type = "Task" [ 663.016441] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.024791] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653987, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.098242] env[62974]: DEBUG nova.network.neutron [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.171349] env[62974]: DEBUG oslo_vmware.api [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2653983, 'name': PowerOnVM_Task, 'duration_secs': 0.537881} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.171561] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.172163] env[62974]: INFO nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Took 7.28 seconds to spawn the instance on the hypervisor. 
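The 'Acquiring lock ... by ...', 'Lock ... acquired ... waited N s' and 'Lock ... "released" ... held N s' entries throughout this log are emitted by oslo.concurrency's lockutils. The sketch below shows the two forms that produce those lines; it is illustrative only, and the lock names and the decorated function are hypothetical.

    # Sketch of the locking pattern behind the lockutils DEBUG entries above.
    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on the named lock and logs the
    # acquired / "released" lines (lockutils inner()) seen in this log.
    @lockutils.synchronized('build_and_run_instance-example-uuid')
    def locked_build_step():
        pass  # work performed while holding the lock

    # Context-manager form, as used for the image-cache locks below
    # ("[datastore2] devstack-image-cache_base/<image id>").
    with lockutils.lock('example-image-cache-lock'):
        pass  # work performed while holding the lock

    locked_build_step()

Both forms default to in-process (thread) locks; the "waited"/"held" durations logged after each acquire and release are what the log uses to surface lock contention, e.g. the 26.180s wait on "compute_resources" recorded further down.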
[ 663.172163] env[62974]: DEBUG nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.172945] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe9dc57-9007-4209-8abd-16f24cb7d7c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.320987] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653986, 'name': CreateVM_Task, 'duration_secs': 0.273031} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.320987] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.321434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.321608] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.321915] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.322175] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52f109f0-a5e8-4251-8223-a975cc3fcf2f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.331025] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 663.331025] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5262b8c8-fbc6-dc8f-ca96-1ba88ee57842" [ 663.331025] env[62974]: _type = "Task" [ 663.331025] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.338524] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5262b8c8-fbc6-dc8f-ca96-1ba88ee57842, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.396399] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.730s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.396987] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 663.400276] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.180s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.400488] env[62974]: DEBUG nova.objects.instance [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lazy-loading 'resources' on Instance uuid b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 663.529075] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653987, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.600493] env[62974]: INFO nova.compute.manager [-] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Took 1.03 seconds to deallocate network for instance. [ 663.689060] env[62974]: INFO nova.compute.manager [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Took 35.20 seconds to build instance. [ 663.840515] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5262b8c8-fbc6-dc8f-ca96-1ba88ee57842, 'name': SearchDatastore_Task, 'duration_secs': 0.010572} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.840844] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.841112] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.841352] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.841497] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.841787] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.841921] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9f9091e-53ef-4013-8dca-3b52600eff4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.850470] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.850742] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.851964] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85b6a187-a427-4034-8bec-b1c023a98565 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.859107] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 663.859107] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525140ba-1972-f3a1-1865-e9fb802126de" [ 663.859107] env[62974]: _type = "Task" [ 663.859107] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.870345] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525140ba-1972-f3a1-1865-e9fb802126de, 'name': SearchDatastore_Task, 'duration_secs': 0.007573} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.871456] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b301ac6-af49-40b5-a85c-f48448ff3c46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.877751] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 663.877751] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c15eac-7f3a-aa99-28c6-6844beef870b" [ 663.877751] env[62974]: _type = "Task" [ 663.877751] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.887560] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c15eac-7f3a-aa99-28c6-6844beef870b, 'name': SearchDatastore_Task, 'duration_secs': 0.007571} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.887794] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.888113] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 69597c3f-ccb2-474d-bb7c-629c5da0b456/69597c3f-ccb2-474d-bb7c-629c5da0b456.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 663.888417] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-292ee2f9-24d8-4d6d-9954-87073854f5b6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.894386] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 663.894386] env[62974]: value = "task-2653988" [ 663.894386] env[62974]: _type = "Task" [ 663.894386] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.908383] env[62974]: DEBUG nova.compute.utils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.912547] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.913388] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.913588] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.982566] env[62974]: DEBUG nova.policy [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d977aa355614e2bbab76080bbe411b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c540bb1bb0e4e86a6e067653ae20895', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 664.027735] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653987, 'name': CreateVM_Task, 'duration_secs': 0.559576} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.031188] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 664.032141] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.032317] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.032615] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 664.033260] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8987bbb3-6552-477a-8b62-1b4d54830390 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.039784] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 664.039784] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dbb40b-fa2e-4bb5-2868-9be7310bf39b" [ 664.039784] env[62974]: _type = "Task" [ 664.039784] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.049825] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dbb40b-fa2e-4bb5-2868-9be7310bf39b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.107726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.191209] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6064a0ed-d1d0-4a07-88ce-ed223aad83ff tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.532s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.301062] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Successfully created port: c24e33da-775e-48dc-8bc1-a5d5571cfdad {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.403921] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489332} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.405830] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 69597c3f-ccb2-474d-bb7c-629c5da0b456/69597c3f-ccb2-474d-bb7c-629c5da0b456.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 664.406023] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.408817] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98c174a7-b812-4918-b3bb-8acb49651053 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.413829] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 664.417648] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 664.417648] env[62974]: value = "task-2653989" [ 664.417648] env[62974]: _type = "Task" [ 664.417648] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.425815] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653989, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.492848] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c8f2ae-020f-4528-b0e2-296f19a9519c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.501931] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5b2e30-76ac-45d5-9038-888133744369 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.535276] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb29af6-aa79-430a-b64b-0036b14e7dc8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.549886] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2173bc43-855d-4b0d-b731-03cd0a584b24 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.555307] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dbb40b-fa2e-4bb5-2868-9be7310bf39b, 'name': SearchDatastore_Task, 'duration_secs': 0.072534} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.555969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.556242] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.556488] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.556808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.556808] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.557060] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3383c320-5962-4d9f-8300-cce39694b32a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.566735] env[62974]: DEBUG nova.compute.provider_tree [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.574718] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.574904] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 664.575660] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-048453d6-1a21-4386-b621-47e19cece2eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.581545] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 664.581545] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5204b840-13a1-3b48-e422-a98f1290e336" [ 664.581545] env[62974]: _type = "Task" [ 664.581545] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.589887] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5204b840-13a1-3b48-e422-a98f1290e336, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.694257] env[62974]: DEBUG nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.940379] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653989, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069815} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.940653] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.941459] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c686b4-19b9-4602-b7bf-7b330465fecf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.964671] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 69597c3f-ccb2-474d-bb7c-629c5da0b456/69597c3f-ccb2-474d-bb7c-629c5da0b456.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.964671] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8660599-c893-490c-b237-13c6c7f5d974 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.983634] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 664.983634] env[62974]: value = "task-2653990" [ 664.983634] env[62974]: _type = "Task" [ 664.983634] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.992535] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653990, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.073024] env[62974]: DEBUG nova.scheduler.client.report [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 665.093942] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5204b840-13a1-3b48-e422-a98f1290e336, 'name': SearchDatastore_Task, 'duration_secs': 0.008059} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.095791] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4a2dc00-4edb-47c0-b4b0-fcc11a2de506 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.102912] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 665.102912] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524f913c-1acd-24d5-304b-ff2a7e8d65f6" [ 665.102912] env[62974]: _type = "Task" [ 665.102912] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.112647] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524f913c-1acd-24d5-304b-ff2a7e8d65f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.221483] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.432187] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 665.462020] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 665.462020] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.462020] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.462481] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.462481] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.462481] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 665.462481] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 665.462481] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 665.462628] env[62974]: DEBUG nova.virt.hardware [None 
req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 665.462628] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 665.462684] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 665.463577] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f3cc4d-87bd-47b7-a143-6de0b3657d4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.471835] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6409e18-b79b-4fc1-b0c6-b7d5bcb6a09c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.497315] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653990, 'name': ReconfigVM_Task, 'duration_secs': 0.26416} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.497615] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 69597c3f-ccb2-474d-bb7c-629c5da0b456/69597c3f-ccb2-474d-bb7c-629c5da0b456.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.498234] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-726fae13-b837-48b5-b866-8db7f60d5e29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.504737] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 665.504737] env[62974]: value = "task-2653991" [ 665.504737] env[62974]: _type = "Task" [ 665.504737] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.513032] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653991, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.577163] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.177s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.579952] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.702s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.580183] env[62974]: DEBUG nova.objects.instance [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 665.608147] env[62974]: INFO nova.scheduler.client.report [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Deleted allocations for instance b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70 [ 665.616483] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524f913c-1acd-24d5-304b-ff2a7e8d65f6, 'name': SearchDatastore_Task, 'duration_secs': 0.023511} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.619123] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.619123] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 665.619123] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2de6697f-cfc9-4ec1-abb9-e10c2d930603 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.625718] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 665.625718] env[62974]: value = "task-2653992" [ 665.625718] env[62974]: _type = "Task" [ 665.625718] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.635106] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653992, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.757125] env[62974]: DEBUG nova.compute.manager [req-adc8fe1a-ab54-4743-a044-8ff1f7694aaa req-e573f409-a700-43d6-940a-5cef55c35338 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Received event network-vif-plugged-c24e33da-775e-48dc-8bc1-a5d5571cfdad {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 665.757125] env[62974]: DEBUG oslo_concurrency.lockutils [req-adc8fe1a-ab54-4743-a044-8ff1f7694aaa req-e573f409-a700-43d6-940a-5cef55c35338 service nova] Acquiring lock "2ebb3385-4177-4506-a4b0-52b53405cf49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.757125] env[62974]: DEBUG oslo_concurrency.lockutils [req-adc8fe1a-ab54-4743-a044-8ff1f7694aaa req-e573f409-a700-43d6-940a-5cef55c35338 service nova] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.757125] env[62974]: DEBUG oslo_concurrency.lockutils [req-adc8fe1a-ab54-4743-a044-8ff1f7694aaa req-e573f409-a700-43d6-940a-5cef55c35338 service nova] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.757125] env[62974]: DEBUG nova.compute.manager [req-adc8fe1a-ab54-4743-a044-8ff1f7694aaa req-e573f409-a700-43d6-940a-5cef55c35338 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] No waiting events found dispatching network-vif-plugged-c24e33da-775e-48dc-8bc1-a5d5571cfdad {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.757396] env[62974]: WARNING nova.compute.manager [req-adc8fe1a-ab54-4743-a044-8ff1f7694aaa req-e573f409-a700-43d6-940a-5cef55c35338 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Received unexpected event network-vif-plugged-c24e33da-775e-48dc-8bc1-a5d5571cfdad for instance with vm_state building and task_state spawning. 
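Annotation (not part of the captured log): the repeated "Acquiring lock ... by ...", "Lock ... acquired ... waited Ns" and "Lock ... released ... held Ns" DEBUG lines above come from oslo.concurrency's lockutils. A minimal sketch of that pattern follows; the function body and the fixed lock names are illustrative only (Nova builds the event-lock name from the instance UUID at runtime).

    from oslo_concurrency import lockutils

    @lockutils.synchronized('2ebb3385-4177-4506-a4b0-52b53405cf49-events')
    def _pop_event():
        # Runs while holding the named in-process lock; lockutils emits the
        # "Acquiring lock ...", "acquired ... waited Ns" and
        # "released ... held Ns" DEBUG records seen in this trace.
        pass

    # Equivalent context-manager form, as used for "compute_resources" above:
    with lockutils.lock('compute_resources'):
        pass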
[ 665.924927] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "bcacc508-b910-4144-bf0b-454b0928ca71" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.925201] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "bcacc508-b910-4144-bf0b-454b0928ca71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.016737] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653991, 'name': Rename_Task, 'duration_secs': 0.171222} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.017084] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.017825] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f20e91f9-c3aa-4a01-b990-99e91703c7c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.025595] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 666.025595] env[62974]: value = "task-2653993" [ 666.025595] env[62974]: _type = "Task" [ 666.025595] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.037048] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653993, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.067170] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Successfully updated port: c24e33da-775e-48dc-8bc1-a5d5571cfdad {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.125679] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3e688faa-15de-4c14-bd89-e37318bffb2a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.635s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.138694] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653992, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492994} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.139037] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 666.139285] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 666.140450] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a42d3072-4a9b-4aa0-a721-dfae4f956049 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.145714] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 666.145714] env[62974]: value = "task-2653994" [ 666.145714] env[62974]: _type = "Task" [ 666.145714] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.153524] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653994, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.541032] env[62974]: DEBUG oslo_vmware.api [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2653993, 'name': PowerOnVM_Task, 'duration_secs': 0.464566} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.541217] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 666.541460] env[62974]: INFO nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Took 3.84 seconds to spawn the instance on the hypervisor. [ 666.541636] env[62974]: DEBUG nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 666.542918] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666943f4-6ca3-47a5-b7b5-b67e2618c182 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.570310] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "refresh_cache-2ebb3385-4177-4506-a4b0-52b53405cf49" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.570358] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "refresh_cache-2ebb3385-4177-4506-a4b0-52b53405cf49" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.570474] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.589843] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dabb604b-4fcf-4242-b11e-7561f658b4ad tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.590950] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 
tempest-ServerExternalEventsTest-1969222377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.262s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.591187] env[62974]: DEBUG nova.objects.instance [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lazy-loading 'resources' on Instance uuid 22a0a34a-c46b-4246-9a80-3540550bd793 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 666.659379] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653994, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064751} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.659551] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 666.660364] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c269fadd-89a2-4f16-aa6c-14b1e076d21e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.681639] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 666.681639] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a366e91-d3b9-4d49-a2b6-6d1ab6c28f23 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.707873] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 666.707873] env[62974]: value = "task-2653995" [ 666.707873] env[62974]: _type = "Task" [ 666.707873] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.717252] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653995, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.071321] env[62974]: INFO nova.compute.manager [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Took 37.15 seconds to build instance. 
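Annotation (not part of the captured log): the "Waiting for the task: (returnval){...}", "Task: {...} progress is N%" and "completed successfully" records above are oslo.vmware's task polling. A hypothetical sketch of that call pattern, not taken from the Nova source, is shown below; `session` stands for an oslo_vmware.api.VMwareAPISession and `vm_ref` for a VM managed-object reference.

    def power_on(session, vm_ref):
        # invoke_api returns a Task managed-object reference; wait_for_task
        # polls it until completion (the "progress is N%" lines) and raises
        # an oslo_vmware exception if the vCenter task fails.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)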
[ 667.108472] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.225650] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.267771] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Updating instance_info_cache with network_info: [{"id": "c24e33da-775e-48dc-8bc1-a5d5571cfdad", "address": "fa:16:3e:af:72:24", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24e33da-77", "ovs_interfaceid": "c24e33da-775e-48dc-8bc1-a5d5571cfdad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.573576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-144aefbb-a486-4d5e-887b-cde99d49edd1 tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "69597c3f-ccb2-474d-bb7c-629c5da0b456" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.929s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.636146] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e464925-e2ec-4692-87d2-833b517fc35b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.646139] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1894f1-5d79-4cea-83b0-322ee0a67705 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.682030] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8249a7f6-2014-422b-8ed9-7ca1bb5e3f15 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.689989] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6d4003-a00f-4de1-89e8-ccabe184b5e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.707251] env[62974]: DEBUG nova.compute.provider_tree [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.721802] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653995, 'name': ReconfigVM_Task, 'duration_secs': 0.905374} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.722147] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 28c247f6-3179-425d-ae1c-615151b1e2ff/28c247f6-3179-425d-ae1c-615151b1e2ff.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 667.722762] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6960033-6a96-4453-9caf-c47806a5da3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.729478] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 667.729478] env[62974]: value = "task-2653996" [ 667.729478] env[62974]: _type = "Task" [ 667.729478] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.739562] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653996, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.771202] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "refresh_cache-2ebb3385-4177-4506-a4b0-52b53405cf49" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.771544] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Instance network_info: |[{"id": "c24e33da-775e-48dc-8bc1-a5d5571cfdad", "address": "fa:16:3e:af:72:24", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24e33da-77", "ovs_interfaceid": "c24e33da-775e-48dc-8bc1-a5d5571cfdad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.771971] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:72:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c24e33da-775e-48dc-8bc1-a5d5571cfdad', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.784318] env[62974]: DEBUG oslo.service.loopingcall [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.784318] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.784318] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-547475ac-0d61-4743-96b8-a976dcc22f55 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.805702] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.805702] env[62974]: value = "task-2653997" [ 667.805702] env[62974]: _type = "Task" [ 667.805702] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.816524] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653997, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.041837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.041837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.041837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.041837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.043728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.043728] env[62974]: INFO nova.compute.manager [None 
req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Terminating instance [ 668.043728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "69fb00b3-6a41-4ef5-8876-6548cae31c07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.043728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.078361] env[62974]: DEBUG nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 668.092514] env[62974]: DEBUG nova.compute.manager [None req-c10385d9-dae6-41b4-a12f-f6db302cb2bb tempest-ServerDiagnosticsV248Test-1434845971 tempest-ServerDiagnosticsV248Test-1434845971-project-admin] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.093680] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e08d75d-fe2e-4ffb-b7ca-d296cf9ecd7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.100168] env[62974]: INFO nova.compute.manager [None req-c10385d9-dae6-41b4-a12f-f6db302cb2bb tempest-ServerDiagnosticsV248Test-1434845971 tempest-ServerDiagnosticsV248Test-1434845971-project-admin] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Retrieving diagnostics [ 668.100885] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c0ad80-9876-4ee0-8e46-8e7b9e7e0cba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.207808] env[62974]: DEBUG nova.scheduler.client.report [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 668.238357] env[62974]: DEBUG oslo_vmware.api [None 
req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653996, 'name': Rename_Task, 'duration_secs': 0.174877} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.238623] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 668.238847] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-973d3cc8-3406-4b6e-8620-e7473de222d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.245750] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 668.245750] env[62974]: value = "task-2653998" [ 668.245750] env[62974]: _type = "Task" [ 668.245750] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.253484] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653998, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.316147] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2653997, 'name': CreateVM_Task, 'duration_secs': 0.339445} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.316282] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.316955] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.317185] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.317668] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 668.318016] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae7363bf-88f1-4770-bddc-33793bd57587 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.323764] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 668.323764] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522354b5-e857-d26a-1584-dd4f4671233f" [ 668.323764] env[62974]: _type = "Task" [ 668.323764] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.335937] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522354b5-e857-d26a-1584-dd4f4671233f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.369591] env[62974]: DEBUG nova.compute.manager [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Received event network-changed-c24e33da-775e-48dc-8bc1-a5d5571cfdad {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 668.369591] env[62974]: DEBUG nova.compute.manager [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Refreshing instance network info cache due to event network-changed-c24e33da-775e-48dc-8bc1-a5d5571cfdad. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 668.369591] env[62974]: DEBUG oslo_concurrency.lockutils [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] Acquiring lock "refresh_cache-2ebb3385-4177-4506-a4b0-52b53405cf49" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.369591] env[62974]: DEBUG oslo_concurrency.lockutils [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] Acquired lock "refresh_cache-2ebb3385-4177-4506-a4b0-52b53405cf49" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.369591] env[62974]: DEBUG nova.network.neutron [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Refreshing network info cache for port c24e33da-775e-48dc-8bc1-a5d5571cfdad {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 668.504682] env[62974]: DEBUG nova.compute.manager [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 668.504937] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.505951] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b66c44-20c6-435a-94cb-6c37d09c0d28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.514122] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.514377] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e13a8483-b382-40e1-9fe5-bf9b900c7714 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.519793] env[62974]: DEBUG oslo_vmware.api [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 668.519793] env[62974]: value = "task-2653999" [ 668.519793] env[62974]: _type = "Task" [ 668.519793] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.529798] env[62974]: DEBUG oslo_vmware.api [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653999, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.602809] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.715039] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.124s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.717694] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.183s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.719589] env[62974]: INFO nova.compute.claims [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.742333] env[62974]: INFO nova.scheduler.client.report [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Deleted allocations for instance 22a0a34a-c46b-4246-9a80-3540550bd793 [ 668.757328] env[62974]: DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653998, 'name': PowerOnVM_Task} progress is 74%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.834577] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522354b5-e857-d26a-1584-dd4f4671233f, 'name': SearchDatastore_Task, 'duration_secs': 0.011091} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.835781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.835781] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.835781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.835781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.836215] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 668.836215] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ce56777-251b-4cc0-b596-f3eb1b31c4c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.844127] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 668.844346] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 668.845191] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-436cf543-e62b-49fc-97fc-7e5cffb7b6ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.850888] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 668.850888] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529a51e2-c43d-e10d-f6d3-3008b7fb948c" [ 668.850888] env[62974]: _type = "Task" [ 668.850888] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.858750] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529a51e2-c43d-e10d-f6d3-3008b7fb948c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.037842] env[62974]: DEBUG oslo_vmware.api [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2653999, 'name': PowerOffVM_Task, 'duration_secs': 0.259466} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.038317] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 669.038695] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 669.038936] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1dd74c9-d97b-45ea-be07-f3cbe33c02f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.101723] env[62974]: DEBUG nova.network.neutron [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Updated VIF entry in instance network info cache for port c24e33da-775e-48dc-8bc1-a5d5571cfdad. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 669.102216] env[62974]: DEBUG nova.network.neutron [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Updating instance_info_cache with network_info: [{"id": "c24e33da-775e-48dc-8bc1-a5d5571cfdad", "address": "fa:16:3e:af:72:24", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc24e33da-77", "ovs_interfaceid": "c24e33da-775e-48dc-8bc1-a5d5571cfdad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.111634] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 669.112709] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 669.112709] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Deleting the datastore file [datastore1] a63aa120-1c7b-4abc-93cf-4d138f5cebde {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 669.112709] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-793d485b-b30c-435b-8906-ba64fa1afa9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.120482] env[62974]: DEBUG oslo_vmware.api [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for the task: (returnval){ [ 669.120482] env[62974]: value = "task-2654001" [ 669.120482] env[62974]: _type = "Task" [ 669.120482] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.129144] env[62974]: DEBUG oslo_vmware.api [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2654001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.218520] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.219084] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.219487] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.220292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.220292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.222683] env[62974]: INFO nova.compute.manager [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Terminating instance [ 669.259069] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6ca827c2-b940-44df-a5b2-9c8795760540 tempest-ServerExternalEventsTest-1969222377 tempest-ServerExternalEventsTest-1969222377-project-member] Lock "22a0a34a-c46b-4246-9a80-3540550bd793" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.593s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.265145] env[62974]: 
DEBUG oslo_vmware.api [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2653998, 'name': PowerOnVM_Task, 'duration_secs': 0.701733} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.265441] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 669.265657] env[62974]: DEBUG nova.compute.manager [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 669.266469] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b03e9d-d62c-4cf0-a63c-5d8309a2cd56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.361940] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529a51e2-c43d-e10d-f6d3-3008b7fb948c, 'name': SearchDatastore_Task, 'duration_secs': 0.009018} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.362256] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5c09f4a-7d12-4305-8720-87e0e36b67e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.371861] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 669.371861] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5294bdf8-d162-07c2-300a-c8752675e116" [ 669.371861] env[62974]: _type = "Task" [ 669.371861] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.381055] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5294bdf8-d162-07c2-300a-c8752675e116, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.606047] env[62974]: DEBUG oslo_concurrency.lockutils [req-09dc7f05-e77c-4406-b34f-076a7515fdfe req-e6e9959d-a65a-4571-b71f-5fd961cf7242 service nova] Releasing lock "refresh_cache-2ebb3385-4177-4506-a4b0-52b53405cf49" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.631115] env[62974]: DEBUG oslo_vmware.api [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Task: {'id': task-2654001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139714} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.631365] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.631551] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 669.631729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.631898] env[62974]: INFO nova.compute.manager [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Took 1.13 seconds to destroy the instance on the hypervisor. [ 669.632998] env[62974]: DEBUG oslo.service.loopingcall [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 669.632998] env[62974]: DEBUG nova.compute.manager [-] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 669.632998] env[62974]: DEBUG nova.network.neutron [-] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.734946] env[62974]: DEBUG nova.compute.manager [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 669.735182] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 669.736132] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a52c0b5-6fc0-4fc1-9ce4-5ccc6b0d4af8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.744483] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 669.744535] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-606ef752-90ec-4b92-8974-5a018ed5b5d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.751037] env[62974]: DEBUG oslo_vmware.api [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 669.751037] env[62974]: value = "task-2654002" [ 669.751037] env[62974]: _type = "Task" [ 669.751037] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.762264] env[62974]: DEBUG oslo_vmware.api [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2654002, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.787769] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.886578] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5294bdf8-d162-07c2-300a-c8752675e116, 'name': SearchDatastore_Task, 'duration_secs': 0.011962} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.886935] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.887253] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 2ebb3385-4177-4506-a4b0-52b53405cf49/2ebb3385-4177-4506-a4b0-52b53405cf49.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 669.887570] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80a61fa5-cc02-4e2c-b638-5c7da2d2f5f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.894209] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 669.894209] env[62974]: value = "task-2654003" [ 669.894209] env[62974]: _type = "Task" [ 669.894209] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.903637] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654003, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.260698] env[62974]: DEBUG oslo_vmware.api [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2654002, 'name': PowerOffVM_Task, 'duration_secs': 0.253526} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.263787] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 670.264075] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 670.266503] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8bad666f-7e8f-47d2-9571-3a0a318408ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.306807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2c4b3f-7ed9-4a20-930f-5b5c2dbde021 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.323599] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052bdce9-f0a6-4e80-ab64-74e293ac1209 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.359279] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b5b98f-c570-42f7-a4fd-6a355348c9cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.362287] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 670.362562] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 670.362783] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Deleting the datastore file [datastore1] 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 670.363097] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fa77dbc-4610-4801-a51d-79e18ff91ee7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.370761] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445ad096-64bf-477d-a264-e26c1193b625 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
670.376740] env[62974]: DEBUG oslo_vmware.api [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for the task: (returnval){ [ 670.376740] env[62974]: value = "task-2654005" [ 670.376740] env[62974]: _type = "Task" [ 670.376740] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.390684] env[62974]: DEBUG nova.compute.provider_tree [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.399439] env[62974]: DEBUG oslo_vmware.api [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2654005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.407597] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654003, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.487803] env[62974]: DEBUG nova.compute.manager [req-6780781b-a5b3-4435-9ecd-7bc753961934 req-4cfe1113-8263-4a16-8963-6b2eee60271e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Received event network-vif-deleted-7dbab348-e4dd-46db-ae81-292fbfcd16dc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 670.488000] env[62974]: INFO nova.compute.manager [req-6780781b-a5b3-4435-9ecd-7bc753961934 req-4cfe1113-8263-4a16-8963-6b2eee60271e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Neutron deleted interface 7dbab348-e4dd-46db-ae81-292fbfcd16dc; detaching it from the instance and deleting it from the info cache [ 670.488175] env[62974]: DEBUG nova.network.neutron [req-6780781b-a5b3-4435-9ecd-7bc753961934 req-4cfe1113-8263-4a16-8963-6b2eee60271e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.489605] env[62974]: DEBUG nova.network.neutron [-] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.735975] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "28c247f6-3179-425d-ae1c-615151b1e2ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.736300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "28c247f6-3179-425d-ae1c-615151b1e2ff" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.736612] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "28c247f6-3179-425d-ae1c-615151b1e2ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.736690] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "28c247f6-3179-425d-ae1c-615151b1e2ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.737045] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "28c247f6-3179-425d-ae1c-615151b1e2ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.738924] env[62974]: INFO nova.compute.manager [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Terminating instance [ 670.888195] env[62974]: DEBUG oslo_vmware.api [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Task: {'id': task-2654005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173747} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.888514] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 670.888731] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 670.888942] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.889159] env[62974]: INFO nova.compute.manager [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Took 1.15 seconds to destroy the instance on the hypervisor. [ 670.889408] env[62974]: DEBUG oslo.service.loopingcall [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.889730] env[62974]: DEBUG nova.compute.manager [-] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 670.889730] env[62974]: DEBUG nova.network.neutron [-] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.894697] env[62974]: DEBUG nova.scheduler.client.report [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.909358] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654003, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529092} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.909785] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 2ebb3385-4177-4506-a4b0-52b53405cf49/2ebb3385-4177-4506-a4b0-52b53405cf49.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 670.910369] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 670.911324] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1caffa3-cde4-4462-916a-e10015a87778 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.924022] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 670.924022] env[62974]: value = "task-2654006" [ 670.924022] env[62974]: _type = "Task" [ 670.924022] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.935773] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654006, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.991520] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7fd2750-b264-4f33-96a7-ab732fcc46ea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.995338] env[62974]: INFO nova.compute.manager [-] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Took 1.36 seconds to deallocate network for instance. [ 671.004483] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7416c0af-fc11-4493-9b8c-922f1a9bfeee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.041668] env[62974]: DEBUG nova.compute.manager [req-6780781b-a5b3-4435-9ecd-7bc753961934 req-4cfe1113-8263-4a16-8963-6b2eee60271e service nova] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Detach interface failed, port_id=7dbab348-e4dd-46db-ae81-292fbfcd16dc, reason: Instance a63aa120-1c7b-4abc-93cf-4d138f5cebde could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 671.242723] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "refresh_cache-28c247f6-3179-425d-ae1c-615151b1e2ff" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.242944] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "refresh_cache-28c247f6-3179-425d-ae1c-615151b1e2ff" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.243155] env[62974]: DEBUG nova.network.neutron [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.357862] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock "4967d5be-6cd4-4f23-aca4-d9ae11112369" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.357862] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "4967d5be-6cd4-4f23-aca4-d9ae11112369" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.402583] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.403199] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 671.406861] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.328s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.407095] env[62974]: DEBUG nova.objects.instance [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lazy-loading 'resources' on Instance uuid 30fcd64c-4570-454b-a7e5-3246c92d90fc {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 671.440682] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654006, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078938} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.441124] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 671.442550] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7791fae8-1cc3-493b-a831-db041ff2fd46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.470486] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 2ebb3385-4177-4506-a4b0-52b53405cf49/2ebb3385-4177-4506-a4b0-52b53405cf49.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 671.471167] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-057a6141-1dda-4b45-a858-47c3f0a5c5f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.493229] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 671.493229] env[62974]: value = "task-2654007" [ 671.493229] env[62974]: _type = "Task" [ 671.493229] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.504332] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654007, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.519163] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.762781] env[62974]: DEBUG nova.network.neutron [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.818562] env[62974]: DEBUG nova.network.neutron [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.910611] env[62974]: DEBUG nova.compute.utils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.915214] env[62974]: DEBUG nova.network.neutron [-] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.917885] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 671.917885] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 671.959851] env[62974]: DEBUG nova.policy [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d977aa355614e2bbab76080bbe411b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c540bb1bb0e4e86a6e067653ae20895', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 672.006337] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654007, 'name': ReconfigVM_Task, 'duration_secs': 0.368802} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.006392] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 2ebb3385-4177-4506-a4b0-52b53405cf49/2ebb3385-4177-4506-a4b0-52b53405cf49.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.007050] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d47638f1-d435-43de-99ca-c80c03748b06 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.015235] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 672.015235] env[62974]: value = "task-2654008" [ 672.015235] env[62974]: _type = "Task" [ 672.015235] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.024043] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654008, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.235503] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Successfully created port: c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 672.322749] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "refresh_cache-28c247f6-3179-425d-ae1c-615151b1e2ff" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.323054] env[62974]: DEBUG nova.compute.manager [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 672.323245] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 672.324262] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01747cb7-ed17-44b0-bd85-8c7f71aff4cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.331843] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 672.332079] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5d405cd-8baa-4821-8cb7-0baa1b2e1c62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.337756] env[62974]: DEBUG oslo_vmware.api [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 672.337756] env[62974]: value = "task-2654009" [ 672.337756] env[62974]: _type = "Task" [ 672.337756] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.344987] env[62974]: DEBUG oslo_vmware.api [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654009, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.388248] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4135cb2f-89b3-4e24-be76-99b3fc97a6ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.399019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f3ca05-38d2-46b8-a67d-39873181e092 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.425372] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 672.430816] env[62974]: INFO nova.compute.manager [-] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Took 1.54 seconds to deallocate network for instance. [ 672.430816] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d21dd3-3315-4587-bcf0-80ffefed2b0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.444321] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa222a4-eedf-4a02-9f73-aad6611457e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.458484] env[62974]: DEBUG nova.compute.provider_tree [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.510957] env[62974]: DEBUG nova.compute.manager [req-7c37e9af-37ea-448c-b529-f3bb99cec834 req-9ebf3ed5-9bb2-48d2-86aa-67b9892d6a82 service nova] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Received event network-vif-deleted-a4073f26-c2d4-4275-aced-337895f21b0c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 672.525038] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654008, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.849474] env[62974]: DEBUG oslo_vmware.api [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654009, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.941692] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.964282] env[62974]: DEBUG nova.scheduler.client.report [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 673.025835] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654008, 'name': Rename_Task, 'duration_secs': 0.934995} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.026108] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 673.026354] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8206dd9-b2f9-491e-a391-53be53ecca30 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.032932] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 673.032932] env[62974]: value = "task-2654010" [ 673.032932] env[62974]: _type = "Task" [ 673.032932] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.040786] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654010, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.350377] env[62974]: DEBUG oslo_vmware.api [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654009, 'name': PowerOffVM_Task, 'duration_secs': 0.57325} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.350729] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 673.350900] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 673.351239] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5af772e-bdc9-4aea-8a0d-7ed38b5a6f3e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.376482] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 673.376693] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 673.376872] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleting the datastore file [datastore2] 28c247f6-3179-425d-ae1c-615151b1e2ff {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.377150] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eeabc3a8-fa2a-4db4-830a-ef121d00c1d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.383319] env[62974]: DEBUG oslo_vmware.api [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 673.383319] env[62974]: value = "task-2654012" [ 673.383319] env[62974]: _type = "Task" [ 673.383319] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.391087] env[62974]: DEBUG oslo_vmware.api [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.434584] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 673.463841] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 673.464134] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 673.464295] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 673.464477] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 673.464622] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 673.464857] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 673.465153] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 673.465372] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 673.465514] env[62974]: DEBUG nova.virt.hardware [None 
req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 673.465809] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 673.465865] env[62974]: DEBUG nova.virt.hardware [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 673.466789] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60916332-de67-46ac-986d-c0ae785ceb5b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.470467] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.064s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.472648] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.304s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.474243] env[62974]: INFO nova.compute.claims [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.484027] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58c68d0-061f-4e82-bf42-f40e55c23c93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.503175] env[62974]: INFO nova.scheduler.client.report [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Deleted allocations for instance 30fcd64c-4570-454b-a7e5-3246c92d90fc [ 673.544232] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654010, 'name': PowerOnVM_Task, 'duration_secs': 0.498341} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.544523] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 673.544744] env[62974]: INFO nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Took 8.11 seconds to spawn the instance on the hypervisor. [ 673.544923] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 673.546068] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d83e1d1-2f9e-496a-ac53-a92249141916 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.724863] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Successfully updated port: c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 673.893131] env[62974]: DEBUG oslo_vmware.api [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09178} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.893371] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.893542] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 673.893720] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 673.893890] env[62974]: INFO nova.compute.manager [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Took 1.57 seconds to destroy the instance on the hypervisor. 
[ 673.894143] env[62974]: DEBUG oslo.service.loopingcall [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 673.894334] env[62974]: DEBUG nova.compute.manager [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 673.894430] env[62974]: DEBUG nova.network.neutron [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.912809] env[62974]: DEBUG nova.network.neutron [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.011287] env[62974]: DEBUG oslo_concurrency.lockutils [None req-428ab7a8-7608-4e86-9184-9b5c0c84d7f3 tempest-DeleteServersAdminTestJSON-1055103841 tempest-DeleteServersAdminTestJSON-1055103841-project-member] Lock "30fcd64c-4570-454b-a7e5-3246c92d90fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.340s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.061307] env[62974]: INFO nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Took 40.10 seconds to build instance. 
[ 674.227874] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "refresh_cache-ea2227ff-f694-4baa-af17-dc50338d8fa6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.228043] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "refresh_cache-ea2227ff-f694-4baa-af17-dc50338d8fa6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.228193] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 674.415267] env[62974]: DEBUG nova.network.neutron [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.545620] env[62974]: DEBUG nova.compute.manager [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Received event network-vif-plugged-c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 674.545620] env[62974]: DEBUG oslo_concurrency.lockutils [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] Acquiring lock "ea2227ff-f694-4baa-af17-dc50338d8fa6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.546779] env[62974]: DEBUG oslo_concurrency.lockutils [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.546999] env[62974]: DEBUG oslo_concurrency.lockutils [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.547222] env[62974]: DEBUG nova.compute.manager [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] No waiting events found dispatching network-vif-plugged-c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 674.547561] env[62974]: WARNING nova.compute.manager [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Received unexpected event 
network-vif-plugged-c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f for instance with vm_state building and task_state spawning. [ 674.547783] env[62974]: DEBUG nova.compute.manager [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Received event network-changed-c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 674.547937] env[62974]: DEBUG nova.compute.manager [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Refreshing instance network info cache due to event network-changed-c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 674.548115] env[62974]: DEBUG oslo_concurrency.lockutils [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] Acquiring lock "refresh_cache-ea2227ff-f694-4baa-af17-dc50338d8fa6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.563500] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.297s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.764159] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.917669] env[62974]: INFO nova.compute.manager [-] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Took 1.02 seconds to deallocate network for instance. 
[ 674.943044] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e552770-995b-4739-92a6-f4500ee9edcd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.954713] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e1c8df-8012-4a0d-8848-9776b881c049 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.985808] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e887ba8b-1dd7-42df-ae9d-938ee5883b6d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.989182] env[62974]: DEBUG nova.network.neutron [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Updating instance_info_cache with network_info: [{"id": "c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f", "address": "fa:16:3e:fd:a3:3a", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7896e84-7b", "ovs_interfaceid": "c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.996058] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84860008-7135-4902-976f-d72d060512f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.010679] env[62974]: DEBUG nova.compute.provider_tree [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.068065] env[62974]: DEBUG nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 675.424278] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.491713] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "refresh_cache-ea2227ff-f694-4baa-af17-dc50338d8fa6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.492167] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Instance network_info: |[{"id": "c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f", "address": "fa:16:3e:fd:a3:3a", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7896e84-7b", "ovs_interfaceid": "c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 675.492529] env[62974]: DEBUG oslo_concurrency.lockutils [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] Acquired lock "refresh_cache-ea2227ff-f694-4baa-af17-dc50338d8fa6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.492747] env[62974]: DEBUG nova.network.neutron [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Refreshing network info cache for port c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.494523] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:a3:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 675.503080] env[62974]: DEBUG oslo.service.loopingcall [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 675.504302] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 675.504630] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac45c774-62db-4147-adff-5820043fa3bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.520473] env[62974]: DEBUG nova.scheduler.client.report [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 675.529426] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 675.529426] env[62974]: value = "task-2654013" [ 675.529426] env[62974]: _type = "Task" [ 675.529426] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.541683] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654013, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.593719] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.866954] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "05742180-08db-45db-9ee0-e359aa8af2f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.867512] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "05742180-08db-45db-9ee0-e359aa8af2f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.868294] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "05742180-08db-45db-9ee0-e359aa8af2f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.868596] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "05742180-08db-45db-9ee0-e359aa8af2f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.868858] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "05742180-08db-45db-9ee0-e359aa8af2f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.871296] env[62974]: INFO nova.compute.manager [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Terminating instance [ 676.026033] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.026266] env[62974]: DEBUG nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 676.029034] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 33.663s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.043825] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654013, 'name': CreateVM_Task, 'duration_secs': 0.360575} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.043825] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 676.043825] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.043825] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.044167] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 676.044738] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-241d9035-3bb2-45da-b039-9db08e072906 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.049271] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 676.049271] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523e5298-c977-76d4-d65e-dda287df1547" [ 676.049271] env[62974]: _type = "Task" [ 676.049271] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.057856] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523e5298-c977-76d4-d65e-dda287df1547, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.340030] env[62974]: DEBUG nova.network.neutron [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Updated VIF entry in instance network info cache for port c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 676.340410] env[62974]: DEBUG nova.network.neutron [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Updating instance_info_cache with network_info: [{"id": "c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f", "address": "fa:16:3e:fd:a3:3a", "network": {"id": "afe215de-c2bb-4d77-9752-d926c251a335", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-366104626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c540bb1bb0e4e86a6e067653ae20895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7896e84-7b", "ovs_interfaceid": "c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.375834] env[62974]: DEBUG nova.compute.manager [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 676.376104] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 676.377426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1368050e-6b18-4765-ad19-fda33a934c0b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.385538] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 676.385793] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9b530dd-a4d6-48a5-bc41-56608de2b913 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.393160] env[62974]: DEBUG oslo_vmware.api [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 676.393160] env[62974]: value = "task-2654014" [ 676.393160] env[62974]: _type = "Task" [ 676.393160] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.401310] env[62974]: DEBUG oslo_vmware.api [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2654014, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.547422] env[62974]: DEBUG nova.compute.utils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.549638] env[62974]: DEBUG nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 676.549820] env[62974]: DEBUG nova.network.neutron [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.562740] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523e5298-c977-76d4-d65e-dda287df1547, 'name': SearchDatastore_Task, 'duration_secs': 0.010079} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.563092] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.563348] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.563682] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.564062] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.564224] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.565068] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eadfabdb-59a9-4840-8c5b-96840a727950 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.577145] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
676.577145] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.577145] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03e3b620-709b-4e0f-8d81-86155ef68d36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.586779] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 676.586779] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52697587-fa74-70ab-fb3a-758cc6012d69" [ 676.586779] env[62974]: _type = "Task" [ 676.586779] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.598351] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52697587-fa74-70ab-fb3a-758cc6012d69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.600831] env[62974]: DEBUG nova.policy [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '861e5f8e6cd94fc7aeff2c22bd65df53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9de9a9e0393b4445a0ce8b5cd0df272f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 676.843179] env[62974]: DEBUG oslo_concurrency.lockutils [req-db56a65f-ef50-4b43-a48c-dd29c3e24cef req-708d083e-0b78-4148-badf-35a859f91d65 service nova] Releasing lock "refresh_cache-ea2227ff-f694-4baa-af17-dc50338d8fa6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.904734] env[62974]: DEBUG oslo_vmware.api [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2654014, 'name': PowerOffVM_Task, 'duration_secs': 0.257043} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.905063] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 676.905318] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 676.905564] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67152e0a-5641-47e0-a315-9a24d26dee17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.979019] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 676.979019] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 676.979019] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Deleting the datastore file [datastore2] 05742180-08db-45db-9ee0-e359aa8af2f0 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 676.979019] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34fc4581-5ff5-465e-bcc1-5046c276f16d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.986257] env[62974]: DEBUG oslo_vmware.api [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for the task: (returnval){ [ 676.986257] env[62974]: value = "task-2654016" [ 676.986257] env[62974]: _type = "Task" [ 676.986257] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.993728] env[62974]: DEBUG oslo_vmware.api [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2654016, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.023537] env[62974]: DEBUG nova.network.neutron [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Successfully created port: 9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.055414] env[62974]: DEBUG nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 677.083067] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085118] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085118] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a63aa120-1c7b-4abc-93cf-4d138f5cebde is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085118] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cf73422d-7f4b-4bae-9d69-de74d7211243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085118] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 1933bc47-1717-48c1-b4a2-492a17573de7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085350] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance d8b7a39f-ec73-4a87-9b1e-9428ca72f895 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085350] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance f9adcd7e-58a0-433c-8602-cca814b84aaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085350] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance b3827c67-9075-4a53-9f9e-8651e3f4b211 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085350] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 05742180-08db-45db-9ee0-e359aa8af2f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085531] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 85f8f79d-330a-49cd-b1ae-8de20c70fcab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085531] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 41f20cb7-c9f9-4201-ae16-4f977dae26cf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085531] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a7a014b9-10e1-45a0-85da-4754051e8d82 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085531] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085531] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
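[editor's note] The resource-tracker entries above fall into three cases: instances actively managed here with placement allocations, instances scheduled here but not yet started (heal skipped), and instances no longer managed here that still hold allocations (WARNING, heal skipped). The standalone Python sketch below reproduces only that triage decision; the function name and data shapes are invented for illustration and are not Nova's _remove_deleted_instances_allocations.

# Illustrative triage of the allocation-heal cases logged above.
# Names and structures are hypothetical simplifications, not Nova objects.

def triage_allocation(instance_uuid, allocations, tracked, scheduled_only):
    """Return a log-style message describing how this allocation is handled.

    tracked        -- set of instance UUIDs actively managed by this host
    scheduled_only -- set of UUIDs scheduled here but not yet started
    """
    resources = allocations[instance_uuid]
    if instance_uuid in tracked:
        return ("DEBUG: Instance %s actively managed on this compute host "
                "and has allocations in placement: %s." % (instance_uuid, resources))
    if instance_uuid in scheduled_only:
        return ("DEBUG: Instance %s has been scheduled to this compute host "
                "but has yet to start. Skipping heal of allocation: %s."
                % (instance_uuid, resources))
    return ("WARNING: Instance %s is not being actively managed by this "
            "compute host but has allocations referencing this compute host: "
            "%s. Skipping heal of allocation because we do not know what to do."
            % (instance_uuid, resources))


if __name__ == "__main__":
    allocations = {
        "cf73422d": {"resources": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}},
        "2174cb7d": {"resources": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}},
        "c763d45b": {"resources": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}},
    }
    tracked = {"cf73422d"}
    scheduled_only = {"c763d45b"}
    for uuid in sorted(allocations):
        print(triage_allocation(uuid, allocations, tracked, scheduled_only))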
[ 677.085687] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 5bc466fb-eebb-40b1-ba09-614a25782ecd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085687] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 6e81e765-4fe3-42a7-a0ba-9860be897a70 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085687] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 28c247f6-3179-425d-ae1c-615151b1e2ff is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 677.085687] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 366b5816-a847-48d1-ad03-5758e473a9d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085809] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 69597c3f-ccb2-474d-bb7c-629c5da0b456 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085809] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 2ebb3385-4177-4506-a4b0-52b53405cf49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085809] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance ea2227ff-f694-4baa-af17-dc50338d8fa6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.085809] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 59ece0e8-85c2-499d-aba2-fd45fc116013 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.097108] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52697587-fa74-70ab-fb3a-758cc6012d69, 'name': SearchDatastore_Task, 'duration_secs': 0.008728} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.097923] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56e714ef-77e3-4961-acd7-5d1db4e01d9c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.103574] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 677.103574] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52231f6d-ef12-1e98-fb8d-43edf9032793" [ 677.103574] env[62974]: _type = "Task" [ 677.103574] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.112981] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52231f6d-ef12-1e98-fb8d-43edf9032793, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.493874] env[62974]: DEBUG oslo_vmware.api [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Task: {'id': task-2654016, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212426} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.494191] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 677.494340] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 677.494515] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 677.494686] env[62974]: INFO nova.compute.manager [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 677.494927] env[62974]: DEBUG oslo.service.loopingcall [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 677.495124] env[62974]: DEBUG nova.compute.manager [-] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 677.495212] env[62974]: DEBUG nova.network.neutron [-] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 677.594778] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c763d45b-44f0-4557-a726-7aad2bc58ba8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.616903] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52231f6d-ef12-1e98-fb8d-43edf9032793, 'name': SearchDatastore_Task, 'duration_secs': 0.010808} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.617233] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.617525] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] ea2227ff-f694-4baa-af17-dc50338d8fa6/ea2227ff-f694-4baa-af17-dc50338d8fa6.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 677.617783] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1728ec8b-a680-4d7f-8bc1-5a86933c2eb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.625318] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 677.625318] env[62974]: value = "task-2654017" [ 677.625318] env[62974]: _type = "Task" [ 677.625318] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.633796] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654017, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.707812] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "65615fd7-c219-4c19-8ecf-11336b616ead" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.708132] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.069269] env[62974]: DEBUG nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 678.101709] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 678.102062] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.102759] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 678.102759] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.102759] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 678.102759] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 678.103038] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 678.103236] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 678.103451] 
env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 678.103649] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 678.103859] env[62974]: DEBUG nova.virt.hardware [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 678.104679] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 669cd72c-556f-40b6-8bc2-f50a125c182a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 678.108027] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ed2d7b-e3ae-4e5e-b599-126873e00388 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.118019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6744a00d-6146-4462-b71e-3aa018a9369a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.124594] env[62974]: DEBUG nova.compute.manager [req-4ab24520-90ef-493d-8437-3527ae6be963 req-59dee07e-50dc-4a2e-a7c5-3a56890b627b service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Received event network-vif-deleted-39515e98-a8f4-4af9-9948-b0a5d05d3188 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 678.125206] env[62974]: INFO nova.compute.manager [req-4ab24520-90ef-493d-8437-3527ae6be963 req-59dee07e-50dc-4a2e-a7c5-3a56890b627b service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Neutron deleted interface 39515e98-a8f4-4af9-9948-b0a5d05d3188; detaching it from the instance and deleting it from the info cache [ 678.125206] env[62974]: DEBUG nova.network.neutron [req-4ab24520-90ef-493d-8437-3527ae6be963 req-59dee07e-50dc-4a2e-a7c5-3a56890b627b service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.145612] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654017, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457919} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.145935] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] ea2227ff-f694-4baa-af17-dc50338d8fa6/ea2227ff-f694-4baa-af17-dc50338d8fa6.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 678.146259] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 678.146555] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff8e312d-455d-4a5f-a9e4-e75206d92b10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.153483] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 678.153483] env[62974]: value = "task-2654018" [ 678.153483] env[62974]: _type = "Task" [ 678.153483] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.163155] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654018, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.511154] env[62974]: DEBUG nova.network.neutron [-] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.610465] env[62974]: DEBUG nova.network.neutron [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Successfully updated port: 9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 678.612334] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance d941a678-1b67-4e0f-8806-e6682ef21774 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 678.640504] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a8ac20f-4f42-47a6-ab74-c4265d837432 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.656886] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0956d2ab-2bd3-4a96-b3f2-37fe1128d8e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.680232] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654018, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087406} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.680753] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.681629] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d9805e-8cd2-4661-a2fa-aa6716a13381 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.695701] env[62974]: DEBUG nova.compute.manager [req-4ab24520-90ef-493d-8437-3527ae6be963 req-59dee07e-50dc-4a2e-a7c5-3a56890b627b service nova] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Detach interface failed, port_id=39515e98-a8f4-4af9-9948-b0a5d05d3188, reason: Instance 05742180-08db-45db-9ee0-e359aa8af2f0 could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 678.716754] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] ea2227ff-f694-4baa-af17-dc50338d8fa6/ea2227ff-f694-4baa-af17-dc50338d8fa6.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 678.717173] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d847c96-1242-4312-9502-b906f1aa0e76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.734816] env[62974]: DEBUG nova.compute.manager [None req-a7c405f5-0f2a-4120-b129-8b9ddf35ccc4 tempest-ServerDiagnosticsV248Test-1434845971 tempest-ServerDiagnosticsV248Test-1434845971-project-admin] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.736434] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d5c272-d2a3-4869-83e3-92ece0ae0acd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.740501] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 678.740501] env[62974]: value = "task-2654019" [ 678.740501] env[62974]: _type = "Task" [ 678.740501] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.747521] env[62974]: INFO nova.compute.manager [None req-a7c405f5-0f2a-4120-b129-8b9ddf35ccc4 tempest-ServerDiagnosticsV248Test-1434845971 tempest-ServerDiagnosticsV248Test-1434845971-project-admin] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Retrieving diagnostics [ 678.748661] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ef7611-bd15-4ddc-8705-50b1df4b59f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.754664] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654019, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.013552] env[62974]: INFO nova.compute.manager [-] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Took 1.52 seconds to deallocate network for instance. 
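[editor's note] Every vCenter call in this trace follows the same shape: a *_Task method is invoked, then the task is polled until it reports success, with "progress is N%" lines and a final duration_secs. The sketch below is a minimal, self-contained imitation of that polling loop; FakeTask and wait_for_task here are stand-ins invented for illustration, not oslo_vmware's real session API.

# Minimal sketch of the "Task: {...} progress is N% ... completed
# successfully" polling pattern visible throughout this log.
import time


class FakeTask(object):
    """Pretend vCenter task that finishes after a few polls."""

    def __init__(self, task_id, name, polls_to_finish=3):
        self.task_id = task_id
        self.name = name
        self._polls = 0
        self._polls_to_finish = polls_to_finish

    def poll(self):
        self._polls += 1
        progress = min(100, int(100 * self._polls / self._polls_to_finish))
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, poll_interval=0.1):
    """Poll until the task reports success; return elapsed seconds."""
    start = time.time()
    while True:
        state, progress = task.poll()
        print("Task: {'id': %s, 'name': %s} progress is %d%%."
              % (task.task_id, task.name, progress))
        if state == "success":
            duration = time.time() - start
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                  "completed successfully." % (task.task_id, task.name, duration))
            return duration
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-2654019", "ReconfigVM_Task"))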
[ 679.114319] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.114545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.114607] env[62974]: DEBUG nova.network.neutron [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.116586] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 8621428e-cf42-47a4-82c8-a003c377b257 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 679.252918] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654019, 'name': ReconfigVM_Task, 'duration_secs': 0.297086} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.253195] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Reconfigured VM instance instance-00000024 to attach disk [datastore1] ea2227ff-f694-4baa-af17-dc50338d8fa6/ea2227ff-f694-4baa-af17-dc50338d8fa6.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 679.253800] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d41f27b-f59d-44a9-b5b6-b93aeadd6134 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.265466] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 679.265466] env[62974]: value = "task-2654020" [ 679.265466] env[62974]: _type = "Task" [ 679.265466] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.277791] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654020, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.519759] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.620990] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance da43a464-ebae-4038-9f7b-330df22d8d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 679.654419] env[62974]: DEBUG nova.network.neutron [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.776216] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654020, 'name': Rename_Task, 'duration_secs': 0.156478} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.776689] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.776762] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6702211f-f129-492b-aa0c-3a6b93781500 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.783115] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 679.783115] env[62974]: value = "task-2654021" [ 679.783115] env[62974]: _type = "Task" [ 679.783115] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.790868] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654021, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.816288] env[62974]: DEBUG nova.network.neutron [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Updating instance_info_cache with network_info: [{"id": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "address": "fa:16:3e:50:78:60", "network": {"id": "3518cb90-bb1f-4059-9f82-0f81c27ff829", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-912534611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de9a9e0393b4445a0ce8b5cd0df272f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ad4fa58-ef", "ovs_interfaceid": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.962135] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "69597c3f-ccb2-474d-bb7c-629c5da0b456" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.962333] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "69597c3f-ccb2-474d-bb7c-629c5da0b456" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.962791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "69597c3f-ccb2-474d-bb7c-629c5da0b456-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.962791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "69597c3f-ccb2-474d-bb7c-629c5da0b456-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
679.962908] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "69597c3f-ccb2-474d-bb7c-629c5da0b456-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.967576] env[62974]: INFO nova.compute.manager [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Terminating instance [ 680.124437] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a14e7e40-afef-4607-8fa9-935a92ea49dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 680.286179] env[62974]: DEBUG nova.compute.manager [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Received event network-vif-plugged-9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 680.286411] env[62974]: DEBUG oslo_concurrency.lockutils [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] Acquiring lock "59ece0e8-85c2-499d-aba2-fd45fc116013-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.287024] env[62974]: DEBUG oslo_concurrency.lockutils [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.287024] env[62974]: DEBUG oslo_concurrency.lockutils [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.287024] env[62974]: DEBUG nova.compute.manager [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] No waiting events found dispatching network-vif-plugged-9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 680.287224] env[62974]: WARNING nova.compute.manager [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Received unexpected event network-vif-plugged-9ad4fa58-ef22-4d11-9cb7-041017dd38fc for instance with vm_state building and task_state spawning. 
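[editor's note] The "No waiting events found dispatching network-vif-plugged-..." line followed by the "Received unexpected event" WARNING comes from the compute manager checking whether anything registered a waiter for that external event before Neutron delivered it. The toy registry below illustrates that pop-or-warn behaviour under stated assumptions; EventRegistry, prepare and dispatch are invented names, not Nova's InstanceEvents API.

# Toy version of the "pop a waiting event or warn about an unexpected one"
# behaviour seen in the network-vif-plugged handling above.
import threading


class EventRegistry(object):
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an external event before starting work."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name, vm_state="building"):
        """Deliver an external event; warn if nobody is waiting for it."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print("WARNING: Received unexpected event %s for instance %s "
                  "with vm_state %s." % (event_name, instance_uuid, vm_state))
            return False
        waiter.set()
        return True


if __name__ == "__main__":
    registry = EventRegistry()
    # Nobody registered, so this mirrors the WARNING in the log.
    registry.dispatch("59ece0e8", "network-vif-plugged-9ad4fa58")
    # With a registered waiter the event is consumed instead.
    w = registry.prepare("59ece0e8", "network-changed-9ad4fa58")
    registry.dispatch("59ece0e8", "network-changed-9ad4fa58")
    print("waiter satisfied:", w.is_set())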
[ 680.287391] env[62974]: DEBUG nova.compute.manager [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Received event network-changed-9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 680.287503] env[62974]: DEBUG nova.compute.manager [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Refreshing instance network info cache due to event network-changed-9ad4fa58-ef22-4d11-9cb7-041017dd38fc. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 680.287674] env[62974]: DEBUG oslo_concurrency.lockutils [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] Acquiring lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.297625] env[62974]: DEBUG oslo_vmware.api [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654021, 'name': PowerOnVM_Task, 'duration_secs': 0.459699} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.297625] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 680.297860] env[62974]: INFO nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Took 6.86 seconds to spawn the instance on the hypervisor. 
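The instance_info_cache updates in this trace embed the Neutron port data for the instance as a JSON list (port id, MAC address, subnets with fixed IPs, MTU, NSX segment details). As an illustrative sketch only, assuming nothing beyond the Python standard library and with vif_summary being a made-up name rather than a Nova helper, that payload can be lifted out of such a log line and decoded like this:

import json

# Hypothetical reading aid: extract the JSON list that follows
# "network_info: " in cache-update log entries and summarise each VIF.
def vif_summary(log_line):
    """Return [(port_id, mac, [fixed_ips], mtu), ...]; [] if no usable payload."""
    marker = "network_info: "
    if marker not in log_line:
        return []
    payload = log_line.split(marker, 1)[1].strip().lstrip("|")   # some entries wrap the list in |...|
    payload = payload.split("]|", 1)[0].split("] {{", 1)[0] + "]"
    vifs = json.loads(payload)
    summary = []
    for vif in vifs:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        summary.append((vif["id"], vif["address"], ips, vif["network"]["meta"]["mtu"]))
    return summary

Applied to the cache update for instance 59ece0e8-85c2-499d-aba2-fd45fc116013 above, this yields port 9ad4fa58-ef22-4d11-9cb7-041017dd38fc with MAC fa:16:3e:50:78:60, fixed IP 192.168.128.7 and MTU 8950.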
[ 680.298054] env[62974]: DEBUG nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 680.298830] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd928324-9f69-42ca-abca-e5f681679feb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.322971] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Releasing lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.323292] env[62974]: DEBUG nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Instance network_info: |[{"id": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "address": "fa:16:3e:50:78:60", "network": {"id": "3518cb90-bb1f-4059-9f82-0f81c27ff829", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-912534611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de9a9e0393b4445a0ce8b5cd0df272f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ad4fa58-ef", "ovs_interfaceid": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 680.323660] env[62974]: DEBUG oslo_concurrency.lockutils [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] Acquired lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.323854] env[62974]: DEBUG nova.network.neutron [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Refreshing network info cache for port 9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.327716] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:50:78:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ad4fa58-ef22-4d11-9cb7-041017dd38fc', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.332722] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Creating folder: Project (9de9a9e0393b4445a0ce8b5cd0df272f). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.333498] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5909ae4-125f-460c-bde1-b2fae3474d43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.346151] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Created folder: Project (9de9a9e0393b4445a0ce8b5cd0df272f) in parent group-v535199. [ 680.346321] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Creating folder: Instances. Parent ref: group-v535307. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 680.347214] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ff99bc5-f529-4dcd-97ec-ac994e0efb89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.359452] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Created folder: Instances in parent group-v535307. [ 680.359452] env[62974]: DEBUG oslo.service.loopingcall [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.359452] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.359614] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abc01367-a8ec-4b30-8970-d15bfc0f7741 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.387372] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.387372] env[62974]: value = "task-2654024" [ 680.387372] env[62974]: _type = "Task" [ 680.387372] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.396829] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654024, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.473171] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "refresh_cache-69597c3f-ccb2-474d-bb7c-629c5da0b456" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.473367] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquired lock "refresh_cache-69597c3f-ccb2-474d-bb7c-629c5da0b456" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.473547] env[62974]: DEBUG nova.network.neutron [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.630125] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 680.646510] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "cf6e4f04-f5f4-46cb-884b-8014af903a10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.646510] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.815133] env[62974]: INFO nova.compute.manager [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Took 41.30 seconds to build instance. [ 680.904585] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654024, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.999185] env[62974]: DEBUG nova.network.neutron [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.084381] env[62974]: DEBUG nova.network.neutron [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.104296] env[62974]: DEBUG nova.network.neutron [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Updated VIF entry in instance network info cache for port 9ad4fa58-ef22-4d11-9cb7-041017dd38fc. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 681.104636] env[62974]: DEBUG nova.network.neutron [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Updating instance_info_cache with network_info: [{"id": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "address": "fa:16:3e:50:78:60", "network": {"id": "3518cb90-bb1f-4059-9f82-0f81c27ff829", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-912534611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de9a9e0393b4445a0ce8b5cd0df272f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ad4fa58-ef", "ovs_interfaceid": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.133526] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 0c2642d5-85fe-4db5-9891-025c88ca8c7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 681.319230] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af82fc90-77a0-48e2-ac9a-8774a619a49e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.022s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.399014] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654024, 'name': CreateVM_Task, 'duration_secs': 0.589911} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.399168] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 681.402178] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.402178] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.402178] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 681.402178] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0de3601a-b111-4a5f-835a-11abeb681c8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.406500] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 681.406500] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fedcfb-6921-63fd-2931-39f515554083" [ 681.406500] env[62974]: _type = "Task" [ 681.406500] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.414972] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fedcfb-6921-63fd-2931-39f515554083, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.587835] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Releasing lock "refresh_cache-69597c3f-ccb2-474d-bb7c-629c5da0b456" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.588467] env[62974]: DEBUG nova.compute.manager [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 681.588771] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 681.589991] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70143ba-2082-4226-b51c-f58a0cd2e2a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.597920] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 681.598161] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b9688be-493f-4fba-a5e8-d164e4ae6044 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.603783] env[62974]: DEBUG oslo_vmware.api [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 681.603783] env[62974]: value = "task-2654025" [ 681.603783] env[62974]: _type = "Task" [ 681.603783] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.606796] env[62974]: DEBUG oslo_concurrency.lockutils [req-c75d2eb7-0ffd-46c7-a0d1-e7254fce3e7a req-b8ccb9a1-54e1-4dc4-9075-5049b9ddaf3d service nova] Releasing lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.611672] env[62974]: DEBUG oslo_vmware.api [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2654025, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.636539] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 3426d512-d54e-4852-8eca-8ba9f5fef418 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 681.644047] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "ea2227ff-f694-4baa-af17-dc50338d8fa6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.644159] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.644380] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "ea2227ff-f694-4baa-af17-dc50338d8fa6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.644616] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.644841] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.647105] env[62974]: INFO nova.compute.manager [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Terminating instance [ 681.744653] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "2ebb3385-4177-4506-a4b0-52b53405cf49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.744920] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.745805] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "2ebb3385-4177-4506-a4b0-52b53405cf49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.745805] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.745805] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.747973] env[62974]: INFO nova.compute.manager [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Terminating instance [ 681.821883] env[62974]: DEBUG nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 681.917178] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fedcfb-6921-63fd-2931-39f515554083, 'name': SearchDatastore_Task, 'duration_secs': 0.024183} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.917551] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.917799] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.918042] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.918190] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.918364] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.918677] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f31c1fb2-f616-43be-81d2-6001744cf8af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.929235] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.929415] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.930180] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8040c3bc-ac02-44d9-966f-30c25d74516d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.935579] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 681.935579] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525105e5-7d81-ca32-2c97-6b93c06de6df" [ 681.935579] env[62974]: _type = "Task" [ 681.935579] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.943009] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525105e5-7d81-ca32-2c97-6b93c06de6df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.113832] env[62974]: DEBUG oslo_vmware.api [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2654025, 'name': PowerOffVM_Task, 'duration_secs': 0.128173} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.113832] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 682.114021] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 682.114608] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7450a53-da39-4d9b-aea6-b9b24619e713 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.140160] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c002aec9-4fdf-45c9-9ef6-d196c4891e19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.144093] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 682.144314] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 682.144490] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Deleting the datastore file [datastore2] 69597c3f-ccb2-474d-bb7c-629c5da0b456 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 682.144795] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2882490a-92dc-46ab-81a4-6b0255c5d64a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.151803] env[62974]: DEBUG nova.compute.manager [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 682.151979] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.157767] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be57672d-5c9e-45c0-ada4-455d46d6e059 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.157767] env[62974]: DEBUG oslo_vmware.api [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for the task: (returnval){ [ 682.157767] env[62974]: value = "task-2654027" [ 682.157767] env[62974]: _type = "Task" [ 682.157767] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.165822] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.165822] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99097b34-5d22-4d1a-b0b0-c644cf9c307e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.169774] env[62974]: DEBUG oslo_vmware.api [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2654027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.174859] env[62974]: DEBUG oslo_vmware.api [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 682.174859] env[62974]: value = "task-2654028" [ 682.174859] env[62974]: _type = "Task" [ 682.174859] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.184346] env[62974]: DEBUG oslo_vmware.api [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.251936] env[62974]: DEBUG nova.compute.manager [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 682.252162] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.253238] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595def8d-40ea-411e-8c71-3e9c1ee6662e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.263098] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.263356] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39e5c9a5-4379-4ab1-82d0-981175778632 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.270361] env[62974]: DEBUG oslo_vmware.api [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 682.270361] env[62974]: value = "task-2654029" [ 682.270361] env[62974]: _type = "Task" [ 682.270361] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.278578] env[62974]: DEBUG oslo_vmware.api [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654029, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.345941] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.445184] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525105e5-7d81-ca32-2c97-6b93c06de6df, 'name': SearchDatastore_Task, 'duration_secs': 0.022635} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.445995] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a73a289-be55-42db-9461-e92e2f106c6b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.451417] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 682.451417] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523db473-2a8a-3387-895b-ca0ee9057c2d" [ 682.451417] env[62974]: _type = "Task" [ 682.451417] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.459289] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523db473-2a8a-3387-895b-ca0ee9057c2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.648461] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance bcacc508-b910-4144-bf0b-454b0928ca71 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.668236] env[62974]: DEBUG oslo_vmware.api [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Task: {'id': task-2654027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111291} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.668529] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 682.668701] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 682.668872] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 682.669049] env[62974]: INFO nova.compute.manager [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Took 1.08 seconds to destroy the instance on the hypervisor. [ 682.669286] env[62974]: DEBUG oslo.service.loopingcall [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.669470] env[62974]: DEBUG nova.compute.manager [-] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 682.669564] env[62974]: DEBUG nova.network.neutron [-] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.684247] env[62974]: DEBUG oslo_vmware.api [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654028, 'name': PowerOffVM_Task, 'duration_secs': 0.187899} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.684892] env[62974]: DEBUG nova.network.neutron [-] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.686450] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 682.686638] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 682.687073] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92196b0f-88bf-4fdc-914c-2ab2d93fce93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.751285] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 682.751565] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 682.751816] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleting the datastore file [datastore1] ea2227ff-f694-4baa-af17-dc50338d8fa6 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 682.752122] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d0122eb-a103-4c30-9ee8-7830916b2a73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.759192] env[62974]: DEBUG oslo_vmware.api [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 682.759192] env[62974]: value = "task-2654031" [ 682.759192] env[62974]: _type = "Task" [ 682.759192] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.766801] env[62974]: DEBUG oslo_vmware.api [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654031, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.778692] env[62974]: DEBUG oslo_vmware.api [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654029, 'name': PowerOffVM_Task, 'duration_secs': 0.218794} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.778899] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 682.779081] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 682.779324] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae3f2f99-f239-4a40-baaf-59a457025490 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.847341] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 682.847564] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 682.847816] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleting the datastore file [datastore2] 2ebb3385-4177-4506-a4b0-52b53405cf49 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 682.848121] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b61349c-98c8-4880-9ad5-3088e9b82d88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.854958] env[62974]: DEBUG oslo_vmware.api [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for the task: (returnval){ [ 682.854958] env[62974]: value = "task-2654033" [ 682.854958] env[62974]: _type = "Task" [ 682.854958] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.862832] env[62974]: DEBUG oslo_vmware.api [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.963455] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523db473-2a8a-3387-895b-ca0ee9057c2d, 'name': SearchDatastore_Task, 'duration_secs': 0.011305} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.963839] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.964111] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 59ece0e8-85c2-499d-aba2-fd45fc116013/59ece0e8-85c2-499d-aba2-fd45fc116013.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.964638] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29671e3b-ff3d-4dc7-8fe4-b63179d3a130 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.971335] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 682.971335] env[62974]: value = "task-2654034" [ 682.971335] env[62974]: _type = "Task" [ 682.971335] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.979359] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.151467] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 69fb00b3-6a41-4ef5-8876-6548cae31c07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.187946] env[62974]: DEBUG nova.network.neutron [-] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.271138] env[62974]: DEBUG oslo_vmware.api [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149792} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.271226] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.271392] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 683.271569] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.271744] env[62974]: INFO nova.compute.manager [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 683.271979] env[62974]: DEBUG oslo.service.loopingcall [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.272189] env[62974]: DEBUG nova.compute.manager [-] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 683.272300] env[62974]: DEBUG nova.network.neutron [-] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 683.365558] env[62974]: DEBUG oslo_vmware.api [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Task: {'id': task-2654033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162016} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.365853] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.366049] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 683.366258] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.366400] env[62974]: INFO nova.compute.manager [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Took 1.11 seconds to destroy the instance on the hypervisor. [ 683.366645] env[62974]: DEBUG oslo.service.loopingcall [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.366840] env[62974]: DEBUG nova.compute.manager [-] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 683.366933] env[62974]: DEBUG nova.network.neutron [-] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 683.481304] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654034, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483272} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.481575] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 59ece0e8-85c2-499d-aba2-fd45fc116013/59ece0e8-85c2-499d-aba2-fd45fc116013.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.481785] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.482036] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d70e0782-78ee-4835-ab5b-d7417a706329 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.488768] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 683.488768] env[62974]: value = "task-2654035" [ 683.488768] env[62974]: _type = "Task" [ 683.488768] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.496449] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654035, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.654979] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 4967d5be-6cd4-4f23-aca4-d9ae11112369 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.655324] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 683.655464] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 683.694267] env[62974]: INFO nova.compute.manager [-] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Took 1.02 seconds to deallocate network for instance. 
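[editor's note] The PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same shape: a vSphere method is invoked, it returns a task reference, and oslo.vmware's wait_for_task/_poll_task layer polls it until it logs "completed successfully" with a duration. The sketch below is a minimal, self-contained illustration of that invoke-and-poll pattern only; get_task_info and TaskFailed are illustrative stand-ins, not oslo.vmware's actual internals.

    import time

    class TaskFailed(Exception):
        """Stand-in for a vSphere task that finishes in an error state."""

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300.0):
        """Poll a task reference until it reports success or error.

        get_task_info is assumed to be a callable returning an object with a
        .state attribute in {'queued', 'running', 'success', 'error'} plus
        .progress and .error fields (conceptually, a property read of the
        task's info object).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info          # callers log "completed successfully" with the duration here
            if info.state == 'error':
                raise TaskFailed(str(info.error))
            # still queued/running: progress is logged, then we poll again
            time.sleep(poll_interval)
        raise TimeoutError('task %s did not complete within %ss' % (task_ref, timeout))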
[ 683.972317] env[62974]: DEBUG nova.compute.manager [req-fa3ddb27-ee56-415b-a209-02ec8086929f req-8c687d5b-403d-4245-896b-7e676de142cb service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Received event network-vif-deleted-c24e33da-775e-48dc-8bc1-a5d5571cfdad {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 683.972317] env[62974]: INFO nova.compute.manager [req-fa3ddb27-ee56-415b-a209-02ec8086929f req-8c687d5b-403d-4245-896b-7e676de142cb service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Neutron deleted interface c24e33da-775e-48dc-8bc1-a5d5571cfdad; detaching it from the instance and deleting it from the info cache [ 683.972317] env[62974]: DEBUG nova.network.neutron [req-fa3ddb27-ee56-415b-a209-02ec8086929f req-8c687d5b-403d-4245-896b-7e676de142cb service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.001581] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654035, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.172972} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.001890] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 684.002677] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1dcf78-2b20-477f-b007-a783590ca082 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.031389] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 59ece0e8-85c2-499d-aba2-fd45fc116013/59ece0e8-85c2-499d-aba2-fd45fc116013.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 684.033688] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f6d1def-c2c3-4f23-b357-7b1231064bac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.055067] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 684.055067] env[62974]: value = "task-2654036" [ 684.055067] env[62974]: _type = "Task" [ 684.055067] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.067570] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654036, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.203297] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.221837] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdc8a75-0336-4d3b-97dc-93c4e66a10b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.230661] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea25e010-b1b2-435a-b145-e0d6e5fde500 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.261578] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dde9234-293d-474a-b4d1-528057bce02e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.269114] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60d45fb-463a-48ce-9d70-338784fe3d84 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.282593] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 684.333100] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Acquiring lock "1933bc47-1717-48c1-b4a2-492a17573de7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.333333] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
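[editor's note] The recurring "Acquiring lock ...", "Lock ... acquired ... waited 0.000s" and "Lock ... released ... held N.NNNs" triplets throughout this trace are emitted by oslo.concurrency's lockutils wrappers (the inner() frames referenced in the log). A minimal usage sketch follows; the lock names are copied from the log and the function bodies are purely illustrative.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Everything in the body runs with the named lock held; the decorator's
        # wrapper produces the acquired/waited and released/held timings.
        pass

    # The same primitive is also available as an explicit context manager, e.g.
    # around a per-instance critical section such as reserve_block_device_name:
    with lockutils.lock('1933bc47-1717-48c1-b4a2-492a17573de7'):
        pass  # illustrative body only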
[ 684.409022] env[62974]: DEBUG nova.network.neutron [-] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.415895] env[62974]: DEBUG nova.network.neutron [-] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.476703] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a753ec56-ff22-40ed-b97f-440c41046d06 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.485874] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e5168a-69e0-4e38-9be1-3bafb95856ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.516686] env[62974]: DEBUG nova.compute.manager [req-fa3ddb27-ee56-415b-a209-02ec8086929f req-8c687d5b-403d-4245-896b-7e676de142cb service nova] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Detach interface failed, port_id=c24e33da-775e-48dc-8bc1-a5d5571cfdad, reason: Instance 2ebb3385-4177-4506-a4b0-52b53405cf49 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 684.564956] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.815110] env[62974]: ERROR nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [req-5ede1d68-a2eb-4c15-b501-6d9f5a616ae3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5ede1d68-a2eb-4c15-b501-6d9f5a616ae3"}]} [ 684.833493] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 684.837796] env[62974]: DEBUG nova.compute.utils [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.858899] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 684.859261] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 684.873498] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 684.894031] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 684.912774] env[62974]: INFO nova.compute.manager [-] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Took 1.55 seconds to deallocate network for instance. [ 684.920806] env[62974]: INFO nova.compute.manager [-] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Took 1.65 seconds to deallocate network for instance. 
[ 685.067511] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654036, 'name': ReconfigVM_Task, 'duration_secs': 0.598565} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.067801] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 59ece0e8-85c2-499d-aba2-fd45fc116013/59ece0e8-85c2-499d-aba2-fd45fc116013.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 685.069029] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de8e30c9-1a98-42ff-bf5e-0fa63e5ab681 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.073941] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 685.073941] env[62974]: value = "task-2654037" [ 685.073941] env[62974]: _type = "Task" [ 685.073941] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.081474] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654037, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.332905] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84dba66f-90dc-490c-a8fa-09857f853383 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.340043] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b680a35b-a247-42a7-87fe-59a9580d52da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.344297] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.371675] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eeeab1c-1fa8-4eb7-a809-c7dee77e2c00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.377807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbf3bb2-dadf-47f7-a4a8-e4f22e1ba2f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.391683] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.422848] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.430912] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.583592] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654037, 'name': Rename_Task, 'duration_secs': 0.141335} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.583859] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 685.584116] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d4581cb-8fc0-4d20-95d1-f683880d64a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.589606] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 685.589606] env[62974]: value = "task-2654038" [ 685.589606] env[62974]: _type = "Task" [ 685.589606] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.596759] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654038, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.924592] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 62 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 685.924945] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 62 to 63 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 685.925198] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 686.100348] env[62974]: DEBUG oslo_vmware.api [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654038, 'name': PowerOnVM_Task, 'duration_secs': 0.460947} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.100348] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 686.100348] env[62974]: INFO nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Took 8.03 seconds to spawn the instance on the hypervisor. [ 686.100348] env[62974]: DEBUG nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 686.101022] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5076b2b0-d714-4300-a306-57c767580ae8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.125347] env[62974]: DEBUG nova.compute.manager [req-b107cfec-d2b4-4ae9-ac55-8c3ee261d9ef req-1a5bf8b7-769a-495f-9dc7-cb9e51667dfd service nova] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Received event network-vif-deleted-c7896e84-7bff-4c8c-9ec7-6a03b6c23e3f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 686.422703] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Acquiring lock "1933bc47-1717-48c1-b4a2-492a17573de7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.423170] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.423533] env[62974]: INFO nova.compute.manager [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Attaching volume 0b736374-ca53-4b30-b016-93a9c0d04afd to /dev/sdb [ 686.443345] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 686.443830] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.415s 
{{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.444537] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.444s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.445921] env[62974]: INFO nova.compute.claims [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.474208] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66c3e7d-cbf1-4e92-ba47-28cff3fe40ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.482154] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96e8e3d-4bf3-4b8b-ac3f-64fa62994b37 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.497974] env[62974]: DEBUG nova.virt.block_device [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updating existing volume attachment record: 385a31c0-11ed-4a2a-9ee4-bdc017190ca0 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 686.623419] env[62974]: INFO nova.compute.manager [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Took 44.47 seconds to build instance. [ 687.126707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-59645bef-f094-452c-b1f3-26bcef8819a5 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.228s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.631205] env[62974]: DEBUG nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 687.654020] env[62974]: DEBUG nova.compute.manager [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Received event network-changed-9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 687.654020] env[62974]: DEBUG nova.compute.manager [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Refreshing instance network info cache due to event network-changed-9ad4fa58-ef22-4d11-9cb7-041017dd38fc. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 687.654020] env[62974]: DEBUG oslo_concurrency.lockutils [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] Acquiring lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.654020] env[62974]: DEBUG oslo_concurrency.lockutils [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] Acquired lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.654483] env[62974]: DEBUG nova.network.neutron [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Refreshing network info cache for port 9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.968922] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659ec530-e0c1-4424-a078-3260997f46b6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.976847] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b11c8d-92b2-48eb-8559-724f80f9a43a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.011190] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f8e3d0-3217-41ff-bf15-6390b340980d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.018791] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b61ddc-f920-4a7c-8e11-3331f52daff0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.032193] env[62974]: DEBUG nova.compute.provider_tree [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.151486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.371865] env[62974]: DEBUG nova.network.neutron [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Updated VIF entry in instance network info cache for port 9ad4fa58-ef22-4d11-9cb7-041017dd38fc. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 688.372216] env[62974]: DEBUG nova.network.neutron [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Updating instance_info_cache with network_info: [{"id": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "address": "fa:16:3e:50:78:60", "network": {"id": "3518cb90-bb1f-4059-9f82-0f81c27ff829", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-912534611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de9a9e0393b4445a0ce8b5cd0df272f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ad4fa58-ef", "ovs_interfaceid": "9ad4fa58-ef22-4d11-9cb7-041017dd38fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.535332] env[62974]: DEBUG nova.scheduler.client.report [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.580717] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "f9adcd7e-58a0-433c-8602-cca814b84aaa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.581012] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 
tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.581243] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "f9adcd7e-58a0-433c-8602-cca814b84aaa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.581449] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.581621] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.583804] env[62974]: INFO nova.compute.manager [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Terminating instance [ 688.875364] env[62974]: DEBUG oslo_concurrency.lockutils [req-57d688e6-141b-4159-9b38-027d748ebbe8 req-4758afca-c092-4072-afff-380e4f2f5771 service nova] Releasing lock "refresh_cache-59ece0e8-85c2-499d-aba2-fd45fc116013" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.040366] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.040740] env[62974]: DEBUG nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 689.043421] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.496s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.044049] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.046101] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.052s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.046286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.047983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.369s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.049379] env[62974]: INFO nova.compute.claims [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 689.070672] env[62974]: INFO nova.scheduler.client.report [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted allocations for instance 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7 [ 689.070672] env[62974]: INFO nova.scheduler.client.report [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Deleted allocations for instance a7a014b9-10e1-45a0-85da-4754051e8d82 [ 689.093298] env[62974]: DEBUG nova.compute.manager [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 689.093523] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 689.094443] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0494bc1-6d42-4eeb-9cf3-3c4dc334e8f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.102247] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.102485] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33537279-c8e6-4af6-bb19-c2177d2edea1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.108884] env[62974]: DEBUG oslo_vmware.api [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 689.108884] env[62974]: value = "task-2654043" [ 689.108884] env[62974]: _type = "Task" [ 689.108884] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.117607] env[62974]: DEBUG oslo_vmware.api [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654043, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.235223] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.235541] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.555629] env[62974]: DEBUG nova.compute.utils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 689.557036] env[62974]: DEBUG nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.557143] env[62974]: DEBUG nova.network.neutron [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.580358] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8171056-7436-4ca4-b978-6189c718e1cb tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.038s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.587175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4d393486-d7cf-42d6-be28-7fa912094a1f tempest-ImagesNegativeTestJSON-2116110957 tempest-ImagesNegativeTestJSON-2116110957-project-member] Lock "a7a014b9-10e1-45a0-85da-4754051e8d82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.452s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.599839] env[62974]: DEBUG nova.policy [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7598ef392db401c8bafff86ef524f74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4598f0721bf64ba4b3aac7af57f60f02', 'project_domain_id': 'default', 
'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.621509] env[62974]: DEBUG oslo_vmware.api [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654043, 'name': PowerOffVM_Task, 'duration_secs': 0.207002} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.622106] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 689.622413] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 689.622684] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e2dc355-e6dc-4784-90e3-267018b74f07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.694249] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 689.694249] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 689.694249] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Deleting the datastore file [datastore1] f9adcd7e-58a0-433c-8602-cca814b84aaa {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 689.694249] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-694229f4-6561-4748-84cf-c743cf6067d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.701335] env[62974]: DEBUG oslo_vmware.api [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 689.701335] env[62974]: value = "task-2654045" [ 689.701335] env[62974]: _type = "Task" [ 689.701335] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.709323] env[62974]: DEBUG oslo_vmware.api [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.904330] env[62974]: DEBUG nova.network.neutron [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Successfully created port: 30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.064938] env[62974]: DEBUG nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 690.212981] env[62974]: DEBUG oslo_vmware.api [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150976} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.213252] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 690.213433] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 690.213605] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.213778] env[62974]: INFO nova.compute.manager [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 690.214026] env[62974]: DEBUG oslo.service.loopingcall [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.214230] env[62974]: DEBUG nova.compute.manager [-] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 690.214324] env[62974]: DEBUG nova.network.neutron [-] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 690.607351] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec7075b-af7c-47dc-80e4-ce6047a6e745 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.618976] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c736b7f1-980d-4eb0-a313-e3c3f184e13c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.651651] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3831f2f2-a5ba-495c-beca-5527363114ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.663204] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ee6f6d-c1df-4a7f-a5d1-5866e77464af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.676567] env[62974]: DEBUG nova.compute.provider_tree [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.932043] env[62974]: DEBUG nova.compute.manager [req-8c4c6606-4664-47c4-9c5e-23ff2c89e4bc req-6d37ddd1-4c86-4095-9c88-59abef58a23a service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Received event network-vif-deleted-8e95b6b2-a646-4f70-9191-7305ffd14c84 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 690.932558] env[62974]: INFO nova.compute.manager [req-8c4c6606-4664-47c4-9c5e-23ff2c89e4bc req-6d37ddd1-4c86-4095-9c88-59abef58a23a service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Neutron deleted interface 8e95b6b2-a646-4f70-9191-7305ffd14c84; detaching it from the instance and deleting it from the info cache [ 690.932853] env[62974]: DEBUG nova.network.neutron [req-8c4c6606-4664-47c4-9c5e-23ff2c89e4bc req-6d37ddd1-4c86-4095-9c88-59abef58a23a service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.078080] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 691.078503] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535312', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'name': 'volume-0b736374-ca53-4b30-b016-93a9c0d04afd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1933bc47-1717-48c1-b4a2-492a17573de7', 'attached_at': '', 'detached_at': '', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'serial': '0b736374-ca53-4b30-b016-93a9c0d04afd'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 691.079585] env[62974]: DEBUG nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 691.083228] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2dd01a-2bea-4c19-9b13-cb5182a36470 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.107691] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea2a7d3-f90d-4d27-98ce-569191fe1413 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.133867] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] volume-0b736374-ca53-4b30-b016-93a9c0d04afd/volume-0b736374-ca53-4b30-b016-93a9c0d04afd.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 691.136795] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 691.136795] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.136918] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 691.137106] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.137283] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 691.137454] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 691.141009] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 691.141009] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 691.141009] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 691.141009] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 691.141009] env[62974]: DEBUG nova.virt.hardware [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 691.141264] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-bef1ce65-ec93-48b5-888b-d7d8dc5550ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.153808] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80411414-859c-4075-8481-5206c7008494 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.165930] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc7cde8-c0f2-4797-8795-629f69ee8c44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.166864] env[62974]: DEBUG oslo_vmware.api [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Waiting for the task: (returnval){ [ 691.166864] env[62974]: value = "task-2654046" [ 691.166864] env[62974]: _type = "Task" [ 691.166864] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.181268] env[62974]: DEBUG nova.scheduler.client.report [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.184931] env[62974]: DEBUG nova.network.neutron [-] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.189309] env[62974]: DEBUG oslo_vmware.api [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654046, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.440708] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-066c4dd0-1881-4134-99a3-d38d89080dd4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.448384] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ddbeb8-baa6-411e-a95c-f4d172a24ce2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.480229] env[62974]: DEBUG nova.compute.manager [req-8c4c6606-4664-47c4-9c5e-23ff2c89e4bc req-6d37ddd1-4c86-4095-9c88-59abef58a23a service nova] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Detach interface failed, port_id=8e95b6b2-a646-4f70-9191-7305ffd14c84, reason: Instance f9adcd7e-58a0-433c-8602-cca814b84aaa could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 691.481519] env[62974]: DEBUG nova.network.neutron [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Successfully updated port: 30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 691.653446] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.653903] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.681883] env[62974]: DEBUG oslo_vmware.api [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654046, 'name': ReconfigVM_Task, 'duration_secs': 0.360186} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.682871] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Reconfigured VM instance instance-0000000e to attach disk [datastore1] volume-0b736374-ca53-4b30-b016-93a9c0d04afd/volume-0b736374-ca53-4b30-b016-93a9c0d04afd.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.688322] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01a6d15c-a772-4f93-8a71-da203d169932 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.699406] env[62974]: INFO nova.compute.manager [-] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Took 1.48 seconds to deallocate network for instance. [ 691.700126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.700593] env[62974]: DEBUG nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Start building networks asynchronously for instance.
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 691.704329] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 42.889s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.704517] env[62974]: DEBUG nova.objects.instance [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 691.712256] env[62974]: DEBUG oslo_vmware.api [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Waiting for the task: (returnval){ [ 691.712256] env[62974]: value = "task-2654047" [ 691.712256] env[62974]: _type = "Task" [ 691.712256] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.722126] env[62974]: DEBUG oslo_vmware.api [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654047, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.983941] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.984117] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquired lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.984270] env[62974]: DEBUG nova.network.neutron [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.208945] env[62974]: DEBUG nova.compute.utils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 692.210594] env[62974]: DEBUG nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 692.210594] env[62974]: DEBUG nova.network.neutron [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 692.218286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.229133] env[62974]: DEBUG oslo_vmware.api [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654047, 'name': ReconfigVM_Task, 'duration_secs': 0.139327} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.229427] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535312', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'name': 'volume-0b736374-ca53-4b30-b016-93a9c0d04afd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1933bc47-1717-48c1-b4a2-492a17573de7', 'attached_at': '', 'detached_at': '', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'serial': '0b736374-ca53-4b30-b016-93a9c0d04afd'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 692.266424] env[62974]: DEBUG nova.policy [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84861fd0e88640529eb573045514dff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39e59f58f7c24529bfce4bcc18cc7925', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 692.534201] env[62974]: DEBUG nova.network.neutron [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.720902] env[62974]: DEBUG nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 692.722438] env[62974]: DEBUG oslo_concurrency.lockutils [None req-43e4bc90-5cae-4810-87d5-c330da840dfd tempest-ServersAdmin275Test-246102671 tempest-ServersAdmin275Test-246102671-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.726068] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.947s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.727773] env[62974]: INFO nova.compute.claims [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.753352] env[62974]: DEBUG nova.network.neutron [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Successfully created port: b71f7882-2184-4093-856d-a4bbc389dd03 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 692.769867] env[62974]: DEBUG nova.network.neutron [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Updating instance_info_cache with network_info: [{"id": "30d94be9-1598-40ca-95b1-cfe821557367", "address": "fa:16:3e:c9:b6:f3", "network": {"id": "435a92ee-f960-40ce-986f-266bccc22f8f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1755820183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4598f0721bf64ba4b3aac7af57f60f02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d94be9-15", "ovs_interfaceid": "30d94be9-1598-40ca-95b1-cfe821557367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 692.996145] env[62974]: DEBUG nova.compute.manager [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Received event network-vif-plugged-30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 692.996257] env[62974]: DEBUG oslo_concurrency.lockutils [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] Acquiring lock "c763d45b-44f0-4557-a726-7aad2bc58ba8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.996571] env[62974]: DEBUG oslo_concurrency.lockutils [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.996945] env[62974]: DEBUG oslo_concurrency.lockutils [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.996945] env[62974]: DEBUG nova.compute.manager [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] No waiting events found dispatching network-vif-plugged-30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 692.996945] env[62974]: WARNING nova.compute.manager [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Received unexpected event network-vif-plugged-30d94be9-1598-40ca-95b1-cfe821557367 for instance with vm_state building and task_state spawning. [ 692.997320] env[62974]: DEBUG nova.compute.manager [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Received event network-changed-30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 692.997320] env[62974]: DEBUG nova.compute.manager [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Refreshing instance network info cache due to event network-changed-30d94be9-1598-40ca-95b1-cfe821557367.
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 692.997692] env[62974]: DEBUG oslo_concurrency.lockutils [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] Acquiring lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.273728] env[62974]: DEBUG nova.objects.instance [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lazy-loading 'flavor' on Instance uuid 1933bc47-1717-48c1-b4a2-492a17573de7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 693.278025] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Releasing lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.278025] env[62974]: DEBUG nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Instance network_info: |[{"id": "30d94be9-1598-40ca-95b1-cfe821557367", "address": "fa:16:3e:c9:b6:f3", "network": {"id": "435a92ee-f960-40ce-986f-266bccc22f8f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1755820183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4598f0721bf64ba4b3aac7af57f60f02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d94be9-15", "ovs_interfaceid": "30d94be9-1598-40ca-95b1-cfe821557367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 693.278217] env[62974]: DEBUG oslo_concurrency.lockutils [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] Acquired lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.278616] env[62974]: DEBUG nova.network.neutron [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Refreshing network info cache for port 30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.279375] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 
tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:b6:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30d94be9-1598-40ca-95b1-cfe821557367', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.289542] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Creating folder: Project (4598f0721bf64ba4b3aac7af57f60f02). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.292450] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cffaca82-7f8f-4edc-9234-a8148c0a3b9c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.309378] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Created folder: Project (4598f0721bf64ba4b3aac7af57f60f02) in parent group-v535199. [ 693.309561] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Creating folder: Instances. Parent ref: group-v535313. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.309803] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c12b3dca-6e59-4688-8690-f90ed982b904 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.319794] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Created folder: Instances in parent group-v535313. [ 693.319794] env[62974]: DEBUG oslo.service.loopingcall [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.319794] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.319794] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6b48b48-9ed6-4e7e-bb89-904ecb3822b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.344833] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.344833] env[62974]: value = "task-2654050" [ 693.344833] env[62974]: _type = "Task" [ 693.344833] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.352512] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654050, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.521049] env[62974]: DEBUG nova.network.neutron [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Updated VIF entry in instance network info cache for port 30d94be9-1598-40ca-95b1-cfe821557367. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 693.521437] env[62974]: DEBUG nova.network.neutron [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Updating instance_info_cache with network_info: [{"id": "30d94be9-1598-40ca-95b1-cfe821557367", "address": "fa:16:3e:c9:b6:f3", "network": {"id": "435a92ee-f960-40ce-986f-266bccc22f8f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1755820183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4598f0721bf64ba4b3aac7af57f60f02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d94be9-15", "ovs_interfaceid": "30d94be9-1598-40ca-95b1-cfe821557367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.731029] env[62974]: DEBUG nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 693.758680] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 693.758921] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 693.759088] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 693.759275] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 693.759421] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 693.759657] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 693.759901] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 693.760611] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 693.760799] env[62974]: DEBUG 
nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 693.760981] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 693.761171] env[62974]: DEBUG nova.virt.hardware [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 693.762027] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9971cd2-0825-467e-b16c-6d5d3c1ec330 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.771259] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f404317a-e7a8-49eb-b478-897f39b3010d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.781125] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8dc40fcc-7215-4d55-bfb8-e2ebc95c2775 tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.358s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.859391] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654050, 'name': CreateVM_Task, 'duration_secs': 0.330815} completed successfully.
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.862275] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.863503] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.863808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.864247] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 693.864614] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4d2d58e-d17b-43bb-aab7-cf0b1e1766d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.870043] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 693.870043] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5260a42d-9e09-5bd6-b1b1-835f78cb8ab0" [ 693.870043] env[62974]: _type = "Task" [ 693.870043] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.878940] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5260a42d-9e09-5bd6-b1b1-835f78cb8ab0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.025261] env[62974]: DEBUG oslo_concurrency.lockutils [req-dec389f8-0224-4df7-bb41-a4e82f20d8ed req-c030172f-f777-439c-8a71-34eefdc8ecd3 service nova] Releasing lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.304963] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6edb2b-56c1-4836-a585-0a045aa53f8b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.312972] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77df90c6-9e83-45cd-bda3-7880dc9a8cfa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.346558] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7439cd2e-c0f2-4349-bcd4-4abebf6ea666 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.353104] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e07f8da-7f98-46de-b086-cfa992c5a3b0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.371618] env[62974]: DEBUG nova.compute.provider_tree [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.380973] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5260a42d-9e09-5bd6-b1b1-835f78cb8ab0, 'name': SearchDatastore_Task, 'duration_secs': 0.009241} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.381311] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.381535] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.381753] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.381902] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.382089] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.382341] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-748619e9-cb0a-4ded-bbf3-dbbe1b3bd172 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.396127] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.396127] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.396650] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2a1f7c2-51b3-4b82-8fcc-d92618e2664b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.402022] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 694.402022] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52605a90-09eb-0b28-b342-7e02a134ac01" [ 694.402022] env[62974]: _type = "Task" [ 694.402022] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.411044] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52605a90-09eb-0b28-b342-7e02a134ac01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.458314] env[62974]: DEBUG nova.network.neutron [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Successfully updated port: b71f7882-2184-4093-856d-a4bbc389dd03 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 694.512311] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Acquiring lock "1933bc47-1717-48c1-b4a2-492a17573de7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.512519] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.877945] env[62974]: DEBUG nova.scheduler.client.report [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.914079] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 
tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52605a90-09eb-0b28-b342-7e02a134ac01, 'name': SearchDatastore_Task, 'duration_secs': 0.008687} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.914699] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3df3b320-5c46-45ed-bae7-52975e07530b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.920990] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 694.920990] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d18db-f01f-afb5-afb8-e9908f86245c" [ 694.920990] env[62974]: _type = "Task" [ 694.920990] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.929554] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d18db-f01f-afb5-afb8-e9908f86245c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.961015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-669cd72c-556f-40b6-8bc2-f50a125c182a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.961197] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-669cd72c-556f-40b6-8bc2-f50a125c182a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.961354] env[62974]: DEBUG nova.network.neutron [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.017644] env[62974]: INFO nova.compute.manager [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Detaching volume 0b736374-ca53-4b30-b016-93a9c0d04afd [ 695.062489] env[62974]: INFO nova.virt.block_device [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Attempting to driver detach volume 0b736374-ca53-4b30-b016-93a9c0d04afd from mountpoint /dev/sdb [ 695.062489] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 
tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 695.062489] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535312', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'name': 'volume-0b736374-ca53-4b30-b016-93a9c0d04afd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1933bc47-1717-48c1-b4a2-492a17573de7', 'attached_at': '', 'detached_at': '', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'serial': '0b736374-ca53-4b30-b016-93a9c0d04afd'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 695.062489] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c990b39-2e0f-4f86-9897-13554dfe2ad0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.091638] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e3e662-2db7-43a6-87b3-6a2fddb689a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.099770] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4410228d-ac8d-48d0-86c3-a033016151e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.123077] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b42d12-2361-4788-b0ca-d3cd0998e7b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.139080] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] The volume has not been displaced from its original location: [datastore1] volume-0b736374-ca53-4b30-b016-93a9c0d04afd/volume-0b736374-ca53-4b30-b016-93a9c0d04afd.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 695.144451] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Reconfiguring VM instance instance-0000000e to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 695.145154] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb94a0e4-fc07-4a18-aaae-c7c41f0d3abd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.169020] env[62974]: DEBUG oslo_vmware.api [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Waiting for the task: (returnval){ [ 695.169020] env[62974]: value = "task-2654051" [ 695.169020] env[62974]: _type = "Task" [ 695.169020] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.178420] env[62974]: DEBUG oslo_vmware.api [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654051, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.385936] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.385936] env[62974]: DEBUG nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 695.388273] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.566s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.388504] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.390657] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.520s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.393050] env[62974]: INFO nova.compute.claims [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.434645] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d18db-f01f-afb5-afb8-e9908f86245c, 'name': SearchDatastore_Task, 'duration_secs': 0.011039} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.434645] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.434645] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c763d45b-44f0-4557-a726-7aad2bc58ba8/c763d45b-44f0-4557-a726-7aad2bc58ba8.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.435203] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c419227e-b73b-41af-b783-d38d6782d6fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.441576] env[62974]: INFO nova.scheduler.client.report [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Deleted allocations for instance 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e [ 695.452047] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 695.452047] env[62974]: value = "task-2654052" [ 695.452047] env[62974]: _type = "Task" [ 695.452047] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.461775] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654052, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.490043] env[62974]: DEBUG nova.compute.manager [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Received event network-vif-plugged-b71f7882-2184-4093-856d-a4bbc389dd03 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 695.490261] env[62974]: DEBUG oslo_concurrency.lockutils [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] Acquiring lock "669cd72c-556f-40b6-8bc2-f50a125c182a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.490496] env[62974]: DEBUG oslo_concurrency.lockutils [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.490664] env[62974]: DEBUG oslo_concurrency.lockutils [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.490841] env[62974]: DEBUG nova.compute.manager [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] No waiting events found dispatching network-vif-plugged-b71f7882-2184-4093-856d-a4bbc389dd03 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 695.491027] env[62974]: WARNING nova.compute.manager [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Received unexpected event network-vif-plugged-b71f7882-2184-4093-856d-a4bbc389dd03 for instance with vm_state building and task_state spawning. [ 695.491234] env[62974]: DEBUG nova.compute.manager [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Received event network-changed-b71f7882-2184-4093-856d-a4bbc389dd03 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 695.491428] env[62974]: DEBUG nova.compute.manager [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Refreshing instance network info cache due to event network-changed-b71f7882-2184-4093-856d-a4bbc389dd03. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 695.491612] env[62974]: DEBUG oslo_concurrency.lockutils [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] Acquiring lock "refresh_cache-669cd72c-556f-40b6-8bc2-f50a125c182a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.517299] env[62974]: DEBUG nova.network.neutron [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.678284] env[62974]: DEBUG oslo_vmware.api [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654051, 'name': ReconfigVM_Task, 'duration_secs': 0.236155} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.678692] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Reconfigured VM instance instance-0000000e to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 695.684730] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8029e8af-65ad-47b6-a1ee-dbde567b99f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.700939] env[62974]: DEBUG nova.network.neutron [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Updating instance_info_cache with network_info: [{"id": "b71f7882-2184-4093-856d-a4bbc389dd03", "address": "fa:16:3e:9d:25:d6", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb71f7882-21", "ovs_interfaceid": "b71f7882-2184-4093-856d-a4bbc389dd03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.708173] env[62974]: DEBUG oslo_vmware.api [None 
req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Waiting for the task: (returnval){ [ 695.708173] env[62974]: value = "task-2654053" [ 695.708173] env[62974]: _type = "Task" [ 695.708173] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.721151] env[62974]: DEBUG oslo_vmware.api [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654053, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.898341] env[62974]: DEBUG nova.compute.utils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 695.904902] env[62974]: DEBUG nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 695.904902] env[62974]: DEBUG nova.network.neutron [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.958420] env[62974]: DEBUG oslo_concurrency.lockutils [None req-866edb4f-282f-44be-ae44-0098b4d4c93a tempest-ServersAdminNegativeTestJSON-782001048 tempest-ServersAdminNegativeTestJSON-782001048-project-member] Lock "2174cb7d-3e73-4529-b9f8-735dd6dfcf4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.534s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.964146] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654052, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461109} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.965519] env[62974]: DEBUG nova.policy [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e07ae60010640d88de0d3b716914186', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd914830aaf454e26b77cbb46722764ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.967220] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c763d45b-44f0-4557-a726-7aad2bc58ba8/c763d45b-44f0-4557-a726-7aad2bc58ba8.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 695.967550] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.967833] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98eee0b2-687f-433a-9b5f-a4a2c7220161 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.975814] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 695.975814] env[62974]: value = "task-2654054" [ 695.975814] env[62974]: _type = "Task" [ 695.975814] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.985080] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654054, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.205699] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-669cd72c-556f-40b6-8bc2-f50a125c182a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.206065] env[62974]: DEBUG nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Instance network_info: |[{"id": "b71f7882-2184-4093-856d-a4bbc389dd03", "address": "fa:16:3e:9d:25:d6", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb71f7882-21", "ovs_interfaceid": "b71f7882-2184-4093-856d-a4bbc389dd03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 696.206377] env[62974]: DEBUG oslo_concurrency.lockutils [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] Acquired lock "refresh_cache-669cd72c-556f-40b6-8bc2-f50a125c182a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.206556] env[62974]: DEBUG nova.network.neutron [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Refreshing network info cache for port b71f7882-2184-4093-856d-a4bbc389dd03 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 696.207776] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:25:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b71f7882-2184-4093-856d-a4bbc389dd03', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 696.219250] env[62974]: DEBUG oslo.service.loopingcall [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 
tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 696.226826] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 696.230646] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8541edb7-a665-4898-8061-9f5aa1e75c04 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.254677] env[62974]: DEBUG oslo_vmware.api [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654053, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.255936] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 696.255936] env[62974]: value = "task-2654055" [ 696.255936] env[62974]: _type = "Task" [ 696.255936] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.263761] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654055, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.350130] env[62974]: DEBUG nova.network.neutron [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Successfully created port: 3c57614f-5d9e-48de-b1c1-03931a43e20e {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.407657] env[62974]: DEBUG nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 696.490104] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135915} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.490104] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.492144] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad19582-d3e6-42f6-a7db-f141e2d2ecf4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.522049] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] c763d45b-44f0-4557-a726-7aad2bc58ba8/c763d45b-44f0-4557-a726-7aad2bc58ba8.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 696.527221] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a949634b-6a76-4195-aa24-f2c765089d9c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.551772] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 696.551772] env[62974]: value = "task-2654056" [ 696.551772] env[62974]: _type = "Task" [ 696.551772] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.564877] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654056, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.609025] env[62974]: DEBUG nova.network.neutron [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Updated VIF entry in instance network info cache for port b71f7882-2184-4093-856d-a4bbc389dd03. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 696.610605] env[62974]: DEBUG nova.network.neutron [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Updating instance_info_cache with network_info: [{"id": "b71f7882-2184-4093-856d-a4bbc389dd03", "address": "fa:16:3e:9d:25:d6", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb71f7882-21", "ovs_interfaceid": "b71f7882-2184-4093-856d-a4bbc389dd03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.737656] env[62974]: DEBUG oslo_vmware.api [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654053, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.766819] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654055, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.880663] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.880663] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.063637] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654056, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.066347] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc52401-e72d-4ccc-b626-7fb282a2cf67 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.074263] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bb3631-36ec-4b83-b8f9-a6a9a880f909 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.111477] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58d36c2-7375-4064-9bad-30de9f3bfbf2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.114265] env[62974]: DEBUG oslo_concurrency.lockutils [req-666b34db-8853-4d8d-859a-d1d787e45dc6 req-78d2f49e-9a08-401c-8fd5-7fd792a804ec service nova] Releasing lock "refresh_cache-669cd72c-556f-40b6-8bc2-f50a125c182a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.119962] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f640be-784f-4689-bea6-569783946f18 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.135091] env[62974]: DEBUG nova.compute.provider_tree [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.236402] env[62974]: DEBUG oslo_vmware.api [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Task: {'id': task-2654053, 'name': ReconfigVM_Task, 'duration_secs': 1.167816} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.236693] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535312', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'name': 'volume-0b736374-ca53-4b30-b016-93a9c0d04afd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1933bc47-1717-48c1-b4a2-492a17573de7', 'attached_at': '', 'detached_at': '', 'volume_id': '0b736374-ca53-4b30-b016-93a9c0d04afd', 'serial': '0b736374-ca53-4b30-b016-93a9c0d04afd'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 697.267886] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654055, 'name': CreateVM_Task, 'duration_secs': 0.907347} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.268193] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 697.268849] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.268964] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.269187] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 697.269886] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa398706-363a-435a-82ed-09216f05f4a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.274597] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 697.274597] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e17038-90c1-db11-84e6-e66df1be5e62" [ 697.274597] env[62974]: _type = "Task" [ 697.274597] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.282312] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e17038-90c1-db11-84e6-e66df1be5e62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.423279] env[62974]: DEBUG nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 697.450481] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 697.451085] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.451085] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 697.451189] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.452200] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 697.452200] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 697.452200] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 697.452200] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 697.452200] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Got 1 possible 
topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 697.452560] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 697.456019] env[62974]: DEBUG nova.virt.hardware [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 697.456019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba6c50c-6459-40a6-9c6f-27fd0fd541b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.468285] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d94acc-6879-41f0-b4e0-352aeb2c3567 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.562659] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654056, 'name': ReconfigVM_Task, 'duration_secs': 0.697276} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.562938] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Reconfigured VM instance instance-00000026 to attach disk [datastore1] c763d45b-44f0-4557-a726-7aad2bc58ba8/c763d45b-44f0-4557-a726-7aad2bc58ba8.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 697.563569] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fbafc61-6f0e-47a1-bf15-2fff7a6ee849 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.570063] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 697.570063] env[62974]: value = "task-2654057" [ 697.570063] env[62974]: _type = "Task" [ 697.570063] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.580331] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654057, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.638616] env[62974]: DEBUG nova.scheduler.client.report [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.787485] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e17038-90c1-db11-84e6-e66df1be5e62, 'name': SearchDatastore_Task, 'duration_secs': 0.00984} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.787832] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.788072] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 697.788303] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.788446] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.788617] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 697.788918] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a83fea20-6226-40fb-abd8-7d20d95b005b {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.796890] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 697.797076] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 697.797786] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f0f57ca-e148-4c5f-8ef9-90cf66204f0b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.805166] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 697.805166] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527852e6-c40b-0b44-d19f-62bb1c53aec0" [ 697.805166] env[62974]: _type = "Task" [ 697.805166] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.812376] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527852e6-c40b-0b44-d19f-62bb1c53aec0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.820561] env[62974]: DEBUG nova.objects.instance [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lazy-loading 'flavor' on Instance uuid 1933bc47-1717-48c1-b4a2-492a17573de7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 697.983469] env[62974]: DEBUG nova.compute.manager [req-9e2219da-c326-4e65-b2b2-6c1e6e3f0f47 req-7163a8a8-4bf1-41f4-9a4c-319651781e50 service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Received event network-vif-plugged-3c57614f-5d9e-48de-b1c1-03931a43e20e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 697.983692] env[62974]: DEBUG oslo_concurrency.lockutils [req-9e2219da-c326-4e65-b2b2-6c1e6e3f0f47 req-7163a8a8-4bf1-41f4-9a4c-319651781e50 service nova] Acquiring lock "d941a678-1b67-4e0f-8806-e6682ef21774-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.983962] env[62974]: DEBUG oslo_concurrency.lockutils [req-9e2219da-c326-4e65-b2b2-6c1e6e3f0f47 req-7163a8a8-4bf1-41f4-9a4c-319651781e50 service nova] Lock "d941a678-1b67-4e0f-8806-e6682ef21774-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.984096] env[62974]: DEBUG oslo_concurrency.lockutils [req-9e2219da-c326-4e65-b2b2-6c1e6e3f0f47 req-7163a8a8-4bf1-41f4-9a4c-319651781e50 service nova] Lock "d941a678-1b67-4e0f-8806-e6682ef21774-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.984263] env[62974]: DEBUG nova.compute.manager [req-9e2219da-c326-4e65-b2b2-6c1e6e3f0f47 req-7163a8a8-4bf1-41f4-9a4c-319651781e50 service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] No waiting events found dispatching network-vif-plugged-3c57614f-5d9e-48de-b1c1-03931a43e20e {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.984426] env[62974]: WARNING nova.compute.manager [req-9e2219da-c326-4e65-b2b2-6c1e6e3f0f47 req-7163a8a8-4bf1-41f4-9a4c-319651781e50 service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Received unexpected event network-vif-plugged-3c57614f-5d9e-48de-b1c1-03931a43e20e for instance with vm_state building and task_state spawning. [ 698.080016] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654057, 'name': Rename_Task, 'duration_secs': 0.137287} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.080459] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.080783] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d75ec657-2775-4ab7-9522-6a472bb00467 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.088493] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 698.088493] env[62974]: value = "task-2654058" [ 698.088493] env[62974]: _type = "Task" [ 698.088493] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.098021] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654058, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.143755] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.144245] env[62974]: DEBUG nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 698.147990] env[62974]: DEBUG nova.network.neutron [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Successfully updated port: 3c57614f-5d9e-48de-b1c1-03931a43e20e {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 698.149421] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.438s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.149660] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.151977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.996s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.152434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.154369] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.415s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.154369] env[62974]: DEBUG nova.objects.instance [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 698.186529] env[62974]: INFO nova.scheduler.client.report [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Deleted allocations for instance 41f20cb7-c9f9-4201-ae16-4f977dae26cf [ 698.195114] env[62974]: INFO nova.scheduler.client.report [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Deleted allocations for instance 
605b1e4c-9bd7-41cd-b5fe-05dd5d7af245 [ 698.316791] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527852e6-c40b-0b44-d19f-62bb1c53aec0, 'name': SearchDatastore_Task, 'duration_secs': 0.008299} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.317351] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36403cd4-3787-4c9c-8d59-fb0425609189 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.325168] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 698.325168] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528a784f-3296-43e1-f5f4-14468831c44e" [ 698.325168] env[62974]: _type = "Task" [ 698.325168] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.336370] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528a784f-3296-43e1-f5f4-14468831c44e, 'name': SearchDatastore_Task, 'duration_secs': 0.009917} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.339527] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.339527] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 698.339527] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3398707e-f9de-418b-bb89-aef42a4d0f03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.344886] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 698.344886] env[62974]: value = "task-2654059" [ 698.344886] env[62974]: _type = "Task" [ 698.344886] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.354830] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.603890] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654058, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.659356] env[62974]: DEBUG nova.compute.utils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.661657] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "refresh_cache-d941a678-1b67-4e0f-8806-e6682ef21774" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.661797] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "refresh_cache-d941a678-1b67-4e0f-8806-e6682ef21774" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.661943] env[62974]: DEBUG nova.network.neutron [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.668697] env[62974]: DEBUG nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 698.671100] env[62974]: DEBUG nova.network.neutron [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 698.697926] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3c85c48-7a99-4307-90d0-a5ad7b9cb84d tempest-ServersAdmin275Test-1131043217 tempest-ServersAdmin275Test-1131043217-project-member] Lock "41f20cb7-c9f9-4201-ae16-4f977dae26cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.219s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.703017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f548020-32f7-423a-90d3-7f2a5139126e tempest-ServersNegativeTestMultiTenantJSON-407937063 tempest-ServersNegativeTestMultiTenantJSON-407937063-project-member] Lock "605b1e4c-9bd7-41cd-b5fe-05dd5d7af245" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.932s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.781302] env[62974]: DEBUG nova.policy [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f563d2ef3444b77b3d0fa15328d78b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ae52d42e1b04ef890523d2b5834a5de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 698.832737] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a4ab261a-76c5-4996-a08e-090620983e8f tempest-VolumesAssistedSnapshotsTest-1798623771 tempest-VolumesAssistedSnapshotsTest-1798623771-project-admin] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.319s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.863762] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474536} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.864240] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 698.864730] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 698.865141] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-302f7443-7433-4b27-8895-f676ddff907a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.875268] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 698.875268] env[62974]: value = "task-2654060" [ 698.875268] env[62974]: _type = "Task" [ 698.875268] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.883417] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654060, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.099527] env[62974]: DEBUG oslo_vmware.api [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654058, 'name': PowerOnVM_Task, 'duration_secs': 0.683348} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.100242] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.100613] env[62974]: INFO nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Took 8.02 seconds to spawn the instance on the hypervisor. 
[ 699.100918] env[62974]: DEBUG nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.101860] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b023a66-24fc-4957-9260-9510a92b11f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.166205] env[62974]: DEBUG nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 699.172885] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bc590dad-b8e1-48b7-99ee-15fde05ef227 tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.172885] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.324s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.175277] env[62974]: INFO nova.compute.claims [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.246119] env[62974]: DEBUG nova.network.neutron [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.357876] env[62974]: DEBUG nova.network.neutron [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Successfully created port: 3b60d221-2cab-4e30-8892-d139b511ccc1 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 699.387885] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654060, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074998} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.388062] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 699.389379] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7b1579-6fe3-407a-9110-6e18c0176286 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.415625] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 699.419060] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9246754-a0ce-479a-b0d4-5b009917b5aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.443607] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 699.443607] env[62974]: value = "task-2654062" [ 699.443607] env[62974]: _type = "Task" [ 699.443607] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.449399] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654062, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.599339] env[62974]: DEBUG nova.network.neutron [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Updating instance_info_cache with network_info: [{"id": "3c57614f-5d9e-48de-b1c1-03931a43e20e", "address": "fa:16:3e:1f:1b:bb", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c57614f-5d", "ovs_interfaceid": "3c57614f-5d9e-48de-b1c1-03931a43e20e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.621577] env[62974]: INFO nova.compute.manager [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Took 56.64 seconds to build instance. [ 699.958103] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654062, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.102929] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "refresh_cache-d941a678-1b67-4e0f-8806-e6682ef21774" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.103368] env[62974]: DEBUG nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Instance network_info: |[{"id": "3c57614f-5d9e-48de-b1c1-03931a43e20e", "address": "fa:16:3e:1f:1b:bb", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c57614f-5d", "ovs_interfaceid": "3c57614f-5d9e-48de-b1c1-03931a43e20e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 700.103713] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:1b:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06cc7c49-c46c-4c1e-bf51-77e9ea802c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c57614f-5d9e-48de-b1c1-03931a43e20e', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.114187] env[62974]: DEBUG oslo.service.loopingcall [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.114187] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 700.114187] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cfb776a-9762-4edd-81e0-4b4c972ca684 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.129378] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5d2f3b53-f921-4d19-aaae-0ced626986c8 tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.610s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.137110] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.137110] env[62974]: value = "task-2654063" [ 700.137110] env[62974]: _type = "Task" [ 700.137110] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.148320] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654063, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.179699] env[62974]: DEBUG nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 700.225182] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:52:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c356ba03-298c-489b-984a-f2eae32bbcc6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1135043868',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 700.226872] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.227137] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 700.227394] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.227586] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 700.228046] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 700.228721] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 700.228892] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 700.229444] env[62974]: DEBUG nova.virt.hardware 
[None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 700.232039] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 700.232850] env[62974]: DEBUG nova.virt.hardware [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 700.233459] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d049f4fa-cd1b-4dc2-bf09-c36e6ed700b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.243338] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492cf6d1-8120-49f3-a828-06a63119fb72 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.310459] env[62974]: DEBUG nova.compute.manager [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Received event network-changed-3c57614f-5d9e-48de-b1c1-03931a43e20e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 700.310459] env[62974]: DEBUG nova.compute.manager [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Refreshing instance network info cache due to event network-changed-3c57614f-5d9e-48de-b1c1-03931a43e20e. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 700.310459] env[62974]: DEBUG oslo_concurrency.lockutils [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] Acquiring lock "refresh_cache-d941a678-1b67-4e0f-8806-e6682ef21774" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.310459] env[62974]: DEBUG oslo_concurrency.lockutils [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] Acquired lock "refresh_cache-d941a678-1b67-4e0f-8806-e6682ef21774" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.310459] env[62974]: DEBUG nova.network.neutron [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Refreshing network info cache for port 3c57614f-5d9e-48de-b1c1-03931a43e20e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 700.454263] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654062, 'name': ReconfigVM_Task, 'duration_secs': 1.01115} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.454521] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 700.455171] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd0337e2-8be1-477a-b250-40936e6a8243 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.465282] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 700.465282] env[62974]: value = "task-2654064" [ 700.465282] env[62974]: _type = "Task" [ 700.465282] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.479358] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654064, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.635427] env[62974]: DEBUG nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 700.648957] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654063, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.800320] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83ec203-e0c1-422d-bb78-b46ffe9bb132 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.810791] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e46f183-7282-47b2-9435-7e331102623a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.849136] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9bee1d-fb2f-4dfb-a41a-6c498cd5d67f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.858092] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54599ba-f79c-4ad4-b48c-acf06ae6cb92 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.882883] env[62974]: DEBUG nova.compute.provider_tree [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 700.983174] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654064, 'name': Rename_Task, 'duration_secs': 0.188839} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.983174] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 700.983174] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81249c8b-5629-49dc-b953-d10454daf8e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.987718] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 700.987718] env[62974]: value = "task-2654065" [ 700.987718] env[62974]: _type = "Task" [ 700.987718] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.996186] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.157152] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654063, 'name': CreateVM_Task, 'duration_secs': 0.540704} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.157563] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 701.158216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.158381] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.158705] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 701.158970] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52216647-195b-4920-a3b3-4b3c03856a61 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.167478] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 701.167478] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219bfc8-0c39-8e97-9a94-2ad9db1b4e50" [ 701.167478] env[62974]: _type = "Task" [ 701.167478] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.178095] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219bfc8-0c39-8e97-9a94-2ad9db1b4e50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.179227] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.180509] env[62974]: DEBUG nova.network.neutron [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Updated VIF entry in instance network info cache for port 3c57614f-5d9e-48de-b1c1-03931a43e20e. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 701.180772] env[62974]: DEBUG nova.network.neutron [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Updating instance_info_cache with network_info: [{"id": "3c57614f-5d9e-48de-b1c1-03931a43e20e", "address": "fa:16:3e:1f:1b:bb", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c57614f-5d", "ovs_interfaceid": "3c57614f-5d9e-48de-b1c1-03931a43e20e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.330115] env[62974]: DEBUG nova.network.neutron [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Successfully updated port: 3b60d221-2cab-4e30-8892-d139b511ccc1 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 701.412302] env[62974]: ERROR nova.scheduler.client.report [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [req-f80b9ada-33d7-40d1-afde-5197f58ff5ce] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f80b9ada-33d7-40d1-afde-5197f58ff5ce"}]} [ 701.442529] env[62974]: DEBUG nova.scheduler.client.report [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 701.462108] env[62974]: DEBUG nova.scheduler.client.report [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 701.462440] env[62974]: DEBUG nova.compute.provider_tree [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 701.482601] env[62974]: DEBUG nova.scheduler.client.report [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 701.502293] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654065, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.509716] env[62974]: DEBUG nova.scheduler.client.report [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 701.683513] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219bfc8-0c39-8e97-9a94-2ad9db1b4e50, 'name': SearchDatastore_Task, 'duration_secs': 0.010771} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.683513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.683513] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 701.683513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.683850] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.683850] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.683969] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2833971-a753-4426-8592-72965033b5e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.689613] env[62974]: DEBUG oslo_concurrency.lockutils [req-b54a285c-c28d-4a93-b399-3510a164d41e req-429eecfc-4027-41d6-8d9f-b5b698160ebb service nova] Releasing lock "refresh_cache-d941a678-1b67-4e0f-8806-e6682ef21774" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.697371] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.697558] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 701.698408] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41df01ad-0b09-4c20-ae94-1102b17e6239 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.706664] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 701.706664] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fa8bf8-6e96-5811-5193-9a52a9327962" [ 701.706664] env[62974]: _type = "Task" [ 701.706664] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.721063] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fa8bf8-6e96-5811-5193-9a52a9327962, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.839023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.839023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.839023] env[62974]: DEBUG nova.network.neutron [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 702.001158] env[62974]: DEBUG oslo_vmware.api [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654065, 'name': PowerOnVM_Task, 'duration_secs': 0.555861} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.004636] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 702.004852] env[62974]: INFO nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Took 8.27 seconds to spawn the instance on the hypervisor. [ 702.005046] env[62974]: DEBUG nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 702.006806] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3168e74-2f64-4f77-a1cb-1a55746e2d3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.084618] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06308be-f5fd-4a39-9bb3-ea720c80c41a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.093480] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e912296f-7410-44b1-87b1-a3838b36d3f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.126901] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933aea58-7777-4df5-bb06-6a17a4b288b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.138458] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dc0ee5-9b52-44f4-a1be-5e14328892b0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.154775] env[62974]: DEBUG nova.compute.provider_tree [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.218292] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fa8bf8-6e96-5811-5193-9a52a9327962, 'name': SearchDatastore_Task, 'duration_secs': 0.027011} completed 
successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.219587] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46308398-f761-48af-b379-42107655cae8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.225578] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 702.225578] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524b6cea-aacf-4c68-6d35-dd3bbbde293d" [ 702.225578] env[62974]: _type = "Task" [ 702.225578] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.234591] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524b6cea-aacf-4c68-6d35-dd3bbbde293d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.376277] env[62974]: DEBUG nova.network.neutron [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.530796] env[62974]: DEBUG nova.compute.manager [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Received event network-vif-plugged-3b60d221-2cab-4e30-8892-d139b511ccc1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 702.531623] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Acquiring lock "8621428e-cf42-47a4-82c8-a003c377b257-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.531623] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Lock "8621428e-cf42-47a4-82c8-a003c377b257-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.531754] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Lock "8621428e-cf42-47a4-82c8-a003c377b257-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.531948] env[62974]: DEBUG nova.compute.manager [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] No waiting events found dispatching network-vif-plugged-3b60d221-2cab-4e30-8892-d139b511ccc1 
{{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 702.532132] env[62974]: WARNING nova.compute.manager [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Received unexpected event network-vif-plugged-3b60d221-2cab-4e30-8892-d139b511ccc1 for instance with vm_state building and task_state spawning. [ 702.532300] env[62974]: DEBUG nova.compute.manager [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Received event network-changed-3b60d221-2cab-4e30-8892-d139b511ccc1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 702.532454] env[62974]: DEBUG nova.compute.manager [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Refreshing instance network info cache due to event network-changed-3b60d221-2cab-4e30-8892-d139b511ccc1. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 702.532717] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Acquiring lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.535292] env[62974]: INFO nova.compute.manager [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Took 53.87 seconds to build instance. 
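Note on the 409 recorded above: the placement.concurrent_update error and the inventory refresh that follows (the provider generation later moves from 66 to 67) are Placement's optimistic-concurrency protocol at work. Every inventory PUT carries the resource provider generation the writer last saw; a stale generation is rejected with 409 so the writer re-reads the provider and retries. The following is a minimal, self-contained Python sketch of that read-check-retry loop under stated assumptions: FakePlacement, set_inventory_with_retry and their signatures are hypothetical illustrations of the pattern, not Nova's scheduler report client or the real Placement HTTP API.

class GenerationConflict(Exception):
    """Raised when a writer's provider generation is stale (stands in for HTTP 409)."""

class FakePlacement:
    """Hypothetical in-memory stand-in for one resource provider record."""
    def __init__(self):
        self.generation = 66
        self.inventory = {"VCPU": {"total": 48}, "MEMORY_MB": {"total": 196590}}

    def get(self):
        # Analogous to GET .../inventories: returns the data plus the current generation.
        return self.generation, dict(self.inventory)

    def put(self, generation, new_inventory):
        # A PUT must carry the generation the caller last saw; a mismatch means
        # another writer updated the provider in the meantime.
        if generation != self.generation:
            raise GenerationConflict("placement.concurrent_update")
        self.inventory = new_inventory
        self.generation += 1  # a successful write bumps the generation
        return self.generation

def set_inventory_with_retry(provider, new_inventory, max_attempts=4):
    """Optimistic-concurrency loop: refresh the generation and retry on conflict."""
    for _attempt in range(max_attempts):
        generation, _current = provider.get()  # refresh the provider view
        try:
            return provider.put(generation, new_inventory)
        except GenerationConflict:
            # Another writer won the race; loop to pick up the new generation.
            continue
    raise RuntimeError("gave up after %d conflicting updates" % max_attempts)

if __name__ == "__main__":
    rp = FakePlacement()
    stale_gen, _ = rp.get()
    rp.generation += 1  # simulate a concurrent writer landing first
    try:
        rp.put(stale_gen, {"VCPU": {"total": 48}})
    except GenerationConflict as exc:
        print("got 409:", exc)  # mirrors the error record above
    print("new generation:", set_inventory_with_retry(rp, {"VCPU": {"total": 48}}))

Running the sketch prints the simulated 409 followed by the generation reached after the successful retry, mirroring the sequence of records around this point in the log.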
[ 702.569586] env[62974]: DEBUG nova.network.neutron [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance_info_cache with network_info: [{"id": "3b60d221-2cab-4e30-8892-d139b511ccc1", "address": "fa:16:3e:80:cf:bd", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b60d221-2c", "ovs_interfaceid": "3b60d221-2cab-4e30-8892-d139b511ccc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.726733] env[62974]: DEBUG nova.scheduler.client.report [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 66 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 702.727028] env[62974]: DEBUG nova.compute.provider_tree [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 66 to 67 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 702.727243] env[62974]: DEBUG nova.compute.provider_tree [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.750529] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f 
tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524b6cea-aacf-4c68-6d35-dd3bbbde293d, 'name': SearchDatastore_Task, 'duration_secs': 0.030747} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.750897] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.751276] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] d941a678-1b67-4e0f-8806-e6682ef21774/d941a678-1b67-4e0f-8806-e6682ef21774.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 702.753953] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c098741-7d25-4bca-88fa-a859cc560711 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.759719] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 702.759719] env[62974]: value = "task-2654066" [ 702.759719] env[62974]: _type = "Task" [ 702.759719] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.769279] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654066, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.779472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "1933bc47-1717-48c1-b4a2-492a17573de7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.779563] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.779772] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "1933bc47-1717-48c1-b4a2-492a17573de7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.779956] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "1933bc47-1717-48c1-b4a2-492a17573de7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.780136] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "1933bc47-1717-48c1-b4a2-492a17573de7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.782309] env[62974]: INFO nova.compute.manager [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Terminating instance [ 703.037708] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3034caa1-3f8f-477b-ac61-18144bfb497f tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.522s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.078713] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.078713] env[62974]: DEBUG nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Instance network_info: |[{"id": "3b60d221-2cab-4e30-8892-d139b511ccc1", "address": "fa:16:3e:80:cf:bd", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b60d221-2c", "ovs_interfaceid": "3b60d221-2cab-4e30-8892-d139b511ccc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 703.078839] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Acquired lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.078839] env[62974]: DEBUG nova.network.neutron [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Refreshing network info cache for port 3b60d221-2cab-4e30-8892-d139b511ccc1 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.078839] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:cf:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b60d221-2cab-4e30-8892-d139b511ccc1', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 703.086831] env[62974]: DEBUG oslo.service.loopingcall [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.090778] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 703.090917] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c6d68e6-b56e-42c5-9c3e-0ce08efd5ced {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.113664] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 703.113664] env[62974]: value = "task-2654067" [ 703.113664] env[62974]: _type = "Task" [ 703.113664] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.133284] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654067, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.241280] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.069s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.241899] env[62974]: DEBUG nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 703.244652] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.137s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.247902] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.252377] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.028s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.252377] env[62974]: INFO nova.compute.claims [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.272634] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508349} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.276116] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] d941a678-1b67-4e0f-8806-e6682ef21774/d941a678-1b67-4e0f-8806-e6682ef21774.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 703.276116] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 703.276116] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a0dd6c9-e4a0-4cd1-87b5-58b595c6dd82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.288320] env[62974]: DEBUG nova.compute.manager [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 703.288541] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 703.288895] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 703.288895] env[62974]: value = "task-2654068" [ 703.288895] env[62974]: _type = "Task" [ 703.288895] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.289872] env[62974]: INFO nova.scheduler.client.report [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Deleted allocations for instance 6e81e765-4fe3-42a7-a0ba-9860be897a70 [ 703.294526] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3add13b3-8de5-4adf-be26-901c88c9f1f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.315370] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654068, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.315690] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.315988] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfb15f27-6e77-4e66-b4f2-1409ed75ef0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.322261] env[62974]: DEBUG oslo_vmware.api [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 703.322261] env[62974]: value = "task-2654069" [ 703.322261] env[62974]: _type = "Task" [ 703.322261] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.332075] env[62974]: DEBUG oslo_vmware.api [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2654069, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.453381] env[62974]: DEBUG nova.network.neutron [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updated VIF entry in instance network info cache for port 3b60d221-2cab-4e30-8892-d139b511ccc1. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 703.453740] env[62974]: DEBUG nova.network.neutron [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance_info_cache with network_info: [{"id": "3b60d221-2cab-4e30-8892-d139b511ccc1", "address": "fa:16:3e:80:cf:bd", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b60d221-2c", "ovs_interfaceid": "3b60d221-2cab-4e30-8892-d139b511ccc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.544807] env[62974]: DEBUG nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 703.624315] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654067, 'name': CreateVM_Task, 'duration_secs': 0.436783} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.624445] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 703.625087] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.625224] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.625515] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 703.625808] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64cc3a5c-db97-4544-846e-0dab257c06f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.630679] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 703.630679] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f69bf2-b7f8-b2cf-bdb7-98552c97a681" [ 703.630679] env[62974]: _type = "Task" [ 703.630679] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.640114] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f69bf2-b7f8-b2cf-bdb7-98552c97a681, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.759097] env[62974]: DEBUG nova.compute.utils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.765366] env[62974]: DEBUG nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 703.765366] env[62974]: DEBUG nova.network.neutron [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 703.817253] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084134} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.817843] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d8441e6c-10ca-4cf4-98eb-8db7308ed6ae tempest-ServerShowV257Test-520911113 tempest-ServerShowV257Test-520911113-project-member] Lock "6e81e765-4fe3-42a7-a0ba-9860be897a70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.932s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.818853] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 703.822108] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1873c4-ef9c-4230-808f-6edc788c0696 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.854683] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] d941a678-1b67-4e0f-8806-e6682ef21774/d941a678-1b67-4e0f-8806-e6682ef21774.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 703.856608] env[62974]: DEBUG nova.policy [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85705a53f9314b08aed10199854f0d2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc2dc33e40e549d1a025e4b883c4dfb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 703.861667] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd0c045a-8291-430f-a6bd-9f87c50a6c02 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.878502] env[62974]: DEBUG oslo_vmware.api [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 
tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2654069, 'name': PowerOffVM_Task, 'duration_secs': 0.321045} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.881127] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 703.881127] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 703.881127] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-864845fe-c081-4df5-8eeb-9c7d68c0e61f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.885853] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 703.885853] env[62974]: value = "task-2654070" [ 703.885853] env[62974]: _type = "Task" [ 703.885853] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.895567] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654070, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.950114] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 703.950114] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 703.950114] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Deleting the datastore file [datastore2] 1933bc47-1717-48c1-b4a2-492a17573de7 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 703.950114] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dd8fbe5-5827-4e16-bc69-df5969800dfc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.957042] env[62974]: DEBUG oslo_vmware.api [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for the task: (returnval){ [ 703.957042] env[62974]: value = "task-2654072" [ 703.957042] env[62974]: _type = "Task" [ 703.957042] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.962083] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Releasing lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.962427] env[62974]: DEBUG nova.compute.manager [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Received event network-changed-30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 703.962696] env[62974]: DEBUG nova.compute.manager [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Refreshing instance network info cache due to event network-changed-30d94be9-1598-40ca-95b1-cfe821557367. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 703.963097] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Acquiring lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.963348] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Acquired lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.963565] env[62974]: DEBUG nova.network.neutron [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Refreshing network info cache for port 30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.971109] env[62974]: DEBUG oslo_vmware.api [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2654072, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.080432] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.141718] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f69bf2-b7f8-b2cf-bdb7-98552c97a681, 'name': SearchDatastore_Task, 'duration_secs': 0.008962} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.142054] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.142287] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 704.142538] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.142673] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.142846] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 704.143160] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5e1cca9-9127-44f3-a1e3-23718dbfd70a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.152097] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 704.152286] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 704.153020] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f06b025-8047-43d6-8fff-f9907ba62cd1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.159050] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 704.159050] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d04d07-3319-a307-530f-90bbc1878f3d" [ 704.159050] env[62974]: _type = "Task" [ 704.159050] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.167504] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d04d07-3319-a307-530f-90bbc1878f3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.266977] env[62974]: DEBUG nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 704.402478] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654070, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.438628] env[62974]: DEBUG nova.network.neutron [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Successfully created port: 5e6afe42-2743-40f8-8491-2b441697f6aa {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.469975] env[62974]: DEBUG oslo_vmware.api [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Task: {'id': task-2654072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140792} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.471508] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.471508] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.471508] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.471508] env[62974]: INFO nova.compute.manager [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 704.471508] env[62974]: DEBUG oslo.service.loopingcall [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 704.472319] env[62974]: DEBUG nova.compute.manager [-] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 704.472319] env[62974]: DEBUG nova.network.neutron [-] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.672236] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d04d07-3319-a307-530f-90bbc1878f3d, 'name': SearchDatastore_Task, 'duration_secs': 0.00942} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.673061] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91434d08-381f-425f-a24a-cec293208748 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.681031] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 704.681031] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fcc2e7-e47b-5f09-d565-a645cb5965fa" [ 704.681031] env[62974]: _type = "Task" [ 704.681031] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.688822] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fcc2e7-e47b-5f09-d565-a645cb5965fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.784519] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf74a6f-6dbd-436c-bbb4-c6906c9472ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.791748] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d080b2f-b54f-46e2-8aed-70b3ec568d8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.595082] env[62974]: INFO nova.compute.manager [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Rebuilding instance [ 705.600604] env[62974]: DEBUG nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 705.609372] env[62974]: DEBUG nova.compute.manager [req-1add2498-5632-4cd9-9453-7f2110364d75 req-80c2a1ff-e3a6-4584-ae51-8908eb114c98 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Received event network-vif-deleted-47b61932-1b0f-4b88-9565-96bf61bb3912 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 705.609601] env[62974]: INFO nova.compute.manager [req-1add2498-5632-4cd9-9453-7f2110364d75 req-80c2a1ff-e3a6-4584-ae51-8908eb114c98 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Neutron deleted interface 47b61932-1b0f-4b88-9565-96bf61bb3912; detaching it from the instance and deleting it from the info cache [ 705.609729] env[62974]: DEBUG nova.network.neutron [req-1add2498-5632-4cd9-9453-7f2110364d75 req-80c2a1ff-e3a6-4584-ae51-8908eb114c98 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.614941] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ab6c4a-9a37-4f5f-87f3-8aa10a442289 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.627404] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654070, 'name': ReconfigVM_Task, 'duration_secs': 0.702335} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.633582] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Reconfigured VM instance instance-00000028 to attach disk [datastore1] d941a678-1b67-4e0f-8806-e6682ef21774/d941a678-1b67-4e0f-8806-e6682ef21774.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.634488] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fcc2e7-e47b-5f09-d565-a645cb5965fa, 'name': SearchDatastore_Task, 'duration_secs': 0.009268} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.635960] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17b78371-b05f-438a-9f1f-bdef60034033 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.641599] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1854f752-5df3-4c55-819e-bc0e30b9eb8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.643843] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.646301] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 705.647992] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98d48fd1-c3fb-4873-9ec9-49e4482861f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.662947] env[62974]: DEBUG nova.compute.provider_tree [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.667464] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 705.667670] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.667835] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 705.668037] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.668517] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.668855] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 705.668967] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 705.670234] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 705.670234] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 
tempest-ServersAdminTestJSON-699970003-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 705.670234] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 705.670234] env[62974]: DEBUG nova.virt.hardware [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 705.673531] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 705.673531] env[62974]: value = "task-2654074" [ 705.673531] env[62974]: _type = "Task" [ 705.673531] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.674227] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878d9b8c-2ccc-410a-9bd2-18b0bcfe4449 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.677314] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 705.677314] env[62974]: value = "task-2654073" [ 705.677314] env[62974]: _type = "Task" [ 705.677314] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.696110] env[62974]: DEBUG nova.compute.manager [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.697095] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabc0253-f344-4b78-9dc0-37490b61772a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.702396] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d452d4b9-f2c7-4502-b8c5-0ccfd62add82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.711608] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654073, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.711949] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654074, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.727042] env[62974]: DEBUG nova.scheduler.client.report [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 705.727042] env[62974]: DEBUG nova.compute.provider_tree [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 67 to 68 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 705.727042] env[62974]: DEBUG nova.compute.provider_tree [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.868755] env[62974]: DEBUG nova.network.neutron [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Updated VIF entry in instance network info cache for port 30d94be9-1598-40ca-95b1-cfe821557367. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 705.869196] env[62974]: DEBUG nova.network.neutron [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Updating instance_info_cache with network_info: [{"id": "30d94be9-1598-40ca-95b1-cfe821557367", "address": "fa:16:3e:c9:b6:f3", "network": {"id": "435a92ee-f960-40ce-986f-266bccc22f8f", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1755820183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4598f0721bf64ba4b3aac7af57f60f02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d94be9-15", "ovs_interfaceid": "30d94be9-1598-40ca-95b1-cfe821557367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.120623] env[62974]: DEBUG nova.network.neutron [-] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.122034] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a75cc40-0b89-4520-adff-6c366f72e8e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.133020] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6b6a73-2cdb-4c95-9882-f56a28e6dd43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.183135] env[62974]: DEBUG nova.compute.manager [req-1add2498-5632-4cd9-9453-7f2110364d75 req-80c2a1ff-e3a6-4584-ae51-8908eb114c98 service nova] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Detach interface failed, port_id=47b61932-1b0f-4b88-9565-96bf61bb3912, reason: Instance 1933bc47-1717-48c1-b4a2-492a17573de7 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 706.197963] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654074, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.202026] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654073, 'name': Rename_Task, 'duration_secs': 0.257667} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.202314] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.202624] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33e3d6c2-1d8c-4575-aaa6-df81467d49a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.209356] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 706.209356] env[62974]: value = "task-2654075" [ 706.209356] env[62974]: _type = "Task" [ 706.209356] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.217806] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654075, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.232913] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.983s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.233405] env[62974]: DEBUG nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 706.238643] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.635s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.240191] env[62974]: INFO nova.compute.claims [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.373951] env[62974]: DEBUG oslo_concurrency.lockutils [req-8df22f17-24ad-4ba4-b0d4-6601bfb6da95 req-7b167c2e-efe2-4c51-b464-8529b0052468 service nova] Releasing lock "refresh_cache-c763d45b-44f0-4557-a726-7aad2bc58ba8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.625429] env[62974]: INFO nova.compute.manager [-] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Took 2.15 seconds to deallocate network for instance. 
[ 706.647118] env[62974]: DEBUG nova.network.neutron [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Successfully updated port: 5e6afe42-2743-40f8-8491-2b441697f6aa {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.695344] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654074, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.722612] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654075, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.733303] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 706.733303] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eab2afcd-7171-4321-9bce-572e448e9e28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.737090] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 706.737090] env[62974]: value = "task-2654076" [ 706.737090] env[62974]: _type = "Task" [ 706.737090] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.750727] env[62974]: DEBUG nova.compute.utils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 706.753087] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654076, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.753743] env[62974]: DEBUG nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 706.753949] env[62974]: DEBUG nova.network.neutron [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 706.813673] env[62974]: DEBUG nova.policy [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd28ad3c4ce834324a129a39ec22ae77b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a368a28171f74897b6d3918fe7915b4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 707.137423] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.150695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "refresh_cache-da43a464-ebae-4038-9f7b-330df22d8d7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.150695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "refresh_cache-da43a464-ebae-4038-9f7b-330df22d8d7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.150845] env[62974]: DEBUG nova.network.neutron [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.200443] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654074, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.522372} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.200443] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.200443] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.200443] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd290742-ce39-4903-8a27-6fa3a31e5bc2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.208837] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 707.208837] env[62974]: value = "task-2654077" [ 707.208837] env[62974]: _type = "Task" [ 707.208837] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.229611] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654077, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.229770] env[62974]: DEBUG oslo_vmware.api [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654075, 'name': PowerOnVM_Task, 'duration_secs': 0.790619} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.230019] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.230243] env[62974]: INFO nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Took 9.81 seconds to spawn the instance on the hypervisor. 
[ 707.230436] env[62974]: DEBUG nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 707.231579] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d08a477-9149-443f-aab0-162fc0163cea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.251507] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654076, 'name': PowerOffVM_Task, 'duration_secs': 0.219375} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.251716] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 707.251966] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 707.252932] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890bde66-2800-4221-b925-a0b0d9dd6cbd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.256122] env[62974]: DEBUG nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 707.268444] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 707.268444] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e70fdb9-3596-474f-9bf3-897064ce5abf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.322426] env[62974]: DEBUG nova.network.neutron [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Successfully created port: 8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 707.368565] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 707.369014] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 707.369218] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleting the datastore file [datastore1] 669cd72c-556f-40b6-8bc2-f50a125c182a {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 707.369567] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcf16692-b818-42a0-8240-d1e6bf4fc73c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.376604] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 707.376604] env[62974]: value = "task-2654079" [ 707.376604] env[62974]: _type = "Task" [ 707.376604] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.387221] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654079, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.666323] env[62974]: DEBUG nova.compute.manager [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Received event network-vif-plugged-5e6afe42-2743-40f8-8491-2b441697f6aa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 707.666585] env[62974]: DEBUG oslo_concurrency.lockutils [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] Acquiring lock "da43a464-ebae-4038-9f7b-330df22d8d7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.667877] env[62974]: DEBUG oslo_concurrency.lockutils [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.667877] env[62974]: DEBUG oslo_concurrency.lockutils [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.667877] env[62974]: DEBUG nova.compute.manager [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] No waiting events found dispatching network-vif-plugged-5e6afe42-2743-40f8-8491-2b441697f6aa {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 707.667877] env[62974]: WARNING nova.compute.manager [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Received unexpected event network-vif-plugged-5e6afe42-2743-40f8-8491-2b441697f6aa for instance with vm_state building and task_state spawning. [ 707.667877] env[62974]: DEBUG nova.compute.manager [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Received event network-changed-5e6afe42-2743-40f8-8491-2b441697f6aa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 707.668078] env[62974]: DEBUG nova.compute.manager [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Refreshing instance network info cache due to event network-changed-5e6afe42-2743-40f8-8491-2b441697f6aa. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 707.668078] env[62974]: DEBUG oslo_concurrency.lockutils [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] Acquiring lock "refresh_cache-da43a464-ebae-4038-9f7b-330df22d8d7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.719614] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066784} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.719877] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 707.720755] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51577703-61e7-443e-8803-077ce23110f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.744772] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.757101] env[62974]: INFO nova.compute.manager [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Took 56.99 seconds to build instance. [ 707.757101] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52ac78d9-7d85-4744-b36e-2a99aa7a28f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.779074] env[62974]: INFO nova.virt.block_device [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Booting with volume 580d4492-2e68-4792-86d6-404ee3e08942 at /dev/sda [ 707.786050] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 707.786050] env[62974]: value = "task-2654080" [ 707.786050] env[62974]: _type = "Task" [ 707.786050] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.799998] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654080, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.853623] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fba669bd-ff79-41f6-9c75-fdfe9020080b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.857844] env[62974]: DEBUG nova.network.neutron [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.869446] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4907bc25-e1b9-4f83-ab25-35cf7d545c3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.883852] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6625406-32bb-4167-b35f-f50bcf12443c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.897349] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad2f9e9-7675-4c21-b68f-7e0cc0706202 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.900642] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131641} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.912631] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 707.912847] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 707.913096] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 707.919477] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13440979-d08f-4ce6-adcc-5c4db0231deb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.948493] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec9d764-dff0-41c7-9ff1-5e191d28096f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.958211] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff0ea57-0547-4717-80b2-1b1f472d53c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.975484] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6313cfce-1e3c-4d1e-b559-089e3566e0ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.003914] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8da8786-fd7b-4d17-8577-17a29d1133cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.008332] env[62974]: DEBUG nova.compute.provider_tree [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 708.016230] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fb9e62-49cb-40d9-aef2-5e1518b3709a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.032169] env[62974]: DEBUG 
nova.virt.block_device [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Updating existing volume attachment record: 918be264-c3ad-419f-aec8-cc45e7697967 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 708.092287] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52529938-8050-4273-9a4f-140061747fc9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.099303] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca191b5-0b6b-4efd-b44d-5a81b90e145c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Suspending the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 708.099859] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-4f054b8a-2033-4bb5-b4d1-1c7de6eab8ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.107220] env[62974]: DEBUG oslo_vmware.api [None req-8ca191b5-0b6b-4efd-b44d-5a81b90e145c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 708.107220] env[62974]: value = "task-2654081" [ 708.107220] env[62974]: _type = "Task" [ 708.107220] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.115580] env[62974]: DEBUG oslo_vmware.api [None req-8ca191b5-0b6b-4efd-b44d-5a81b90e145c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654081, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.129384] env[62974]: DEBUG nova.network.neutron [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Updating instance_info_cache with network_info: [{"id": "5e6afe42-2743-40f8-8491-2b441697f6aa", "address": "fa:16:3e:73:98:3f", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e6afe42-27", "ovs_interfaceid": "5e6afe42-2743-40f8-8491-2b441697f6aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.280012] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eab76dd2-7dfd-40d4-918e-98a7f391471f tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "d941a678-1b67-4e0f-8806-e6682ef21774" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 83.226s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.299173] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654080, 'name': ReconfigVM_Task, 'duration_secs': 0.288288} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.300106] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 708.300767] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa5402aa-7857-450e-b8fb-572d09749d04 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.310405] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 708.310405] env[62974]: value = "task-2654082" [ 708.310405] env[62974]: _type = "Task" [ 708.310405] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.322128] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654082, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.545507] env[62974]: DEBUG nova.scheduler.client.report [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 708.545752] env[62974]: DEBUG nova.compute.provider_tree [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 68 to 69 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 708.546117] env[62974]: DEBUG nova.compute.provider_tree [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 708.617660] env[62974]: DEBUG oslo_vmware.api [None req-8ca191b5-0b6b-4efd-b44d-5a81b90e145c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654081, 'name': SuspendVM_Task} progress is 62%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.631962] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "refresh_cache-da43a464-ebae-4038-9f7b-330df22d8d7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.632330] env[62974]: DEBUG nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Instance network_info: |[{"id": "5e6afe42-2743-40f8-8491-2b441697f6aa", "address": "fa:16:3e:73:98:3f", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e6afe42-27", "ovs_interfaceid": "5e6afe42-2743-40f8-8491-2b441697f6aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 708.632626] env[62974]: DEBUG oslo_concurrency.lockutils [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] Acquired lock "refresh_cache-da43a464-ebae-4038-9f7b-330df22d8d7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.632797] env[62974]: DEBUG nova.network.neutron [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Refreshing network info cache for port 5e6afe42-2743-40f8-8491-2b441697f6aa {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.634214] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:98:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'5e6afe42-2743-40f8-8491-2b441697f6aa', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.641932] env[62974]: DEBUG oslo.service.loopingcall [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.642388] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 708.644054] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac777d9f-22cb-4057-8c8a-eb7fd5a8ea03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.663404] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.663404] env[62974]: value = "task-2654083" [ 708.663404] env[62974]: _type = "Task" [ 708.663404] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.671647] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654083, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.785517] env[62974]: DEBUG nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 708.824311] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654082, 'name': Rename_Task, 'duration_secs': 0.153154} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.824614] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.824867] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82c4ff91-8027-498f-912d-6399d292149b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.832794] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 708.832794] env[62974]: value = "task-2654084" [ 708.832794] env[62974]: _type = "Task" [ 708.832794] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.844338] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654084, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.955955] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 708.956498] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.956498] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 708.956671] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.957554] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 708.957554] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 708.957554] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 708.957554] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 708.957554] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 708.958697] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 708.958697] env[62974]: DEBUG nova.virt.hardware [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 708.958804] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe4f153-4324-4147-ad07-d441ba9ff75b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.967476] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317f2551-6a1f-4d77-9b29-261218656047 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.981604] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:25:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b71f7882-2184-4093-856d-a4bbc389dd03', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.989358] env[62974]: DEBUG oslo.service.loopingcall [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.989685] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 708.989981] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62cae07e-aadf-47f4-ac27-1ca7006a0c73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.012297] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.012297] env[62974]: value = "task-2654085" [ 709.012297] env[62974]: _type = "Task" [ 709.012297] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.022602] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654085, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.056669] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.056669] env[62974]: DEBUG nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 709.059604] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.275s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.059933] env[62974]: DEBUG nova.objects.instance [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 709.116598] env[62974]: DEBUG oslo_vmware.api [None req-8ca191b5-0b6b-4efd-b44d-5a81b90e145c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654081, 'name': SuspendVM_Task, 'duration_secs': 0.765792} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.116876] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca191b5-0b6b-4efd-b44d-5a81b90e145c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Suspended the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 709.117072] env[62974]: DEBUG nova.compute.manager [None req-8ca191b5-0b6b-4efd-b44d-5a81b90e145c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.117884] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f899a35-5525-4fc7-90a6-d958056c01b6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.180677] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654083, 'name': CreateVM_Task, 'duration_secs': 0.351197} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.180677] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 709.180677] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.180677] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.180677] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.182450] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8122f1cf-bff4-4273-abcd-347fda036545 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.188666] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 709.188666] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529629af-ca75-90d5-7825-0a2e5c493d66" [ 709.188666] env[62974]: _type = "Task" [ 709.188666] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.200222] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529629af-ca75-90d5-7825-0a2e5c493d66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.317342] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.346053] env[62974]: DEBUG oslo_vmware.api [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654084, 'name': PowerOnVM_Task, 'duration_secs': 0.489871} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.349726] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.349726] env[62974]: INFO nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Took 9.17 seconds to spawn the instance on the hypervisor. [ 709.349726] env[62974]: DEBUG nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.349726] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edcae21-bf9a-4785-9502-64b4b38c30df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.495914] env[62974]: DEBUG nova.network.neutron [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Successfully updated port: 8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 709.509488] env[62974]: DEBUG nova.network.neutron [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Updated VIF entry in instance network info cache for port 5e6afe42-2743-40f8-8491-2b441697f6aa. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.509828] env[62974]: DEBUG nova.network.neutron [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Updating instance_info_cache with network_info: [{"id": "5e6afe42-2743-40f8-8491-2b441697f6aa", "address": "fa:16:3e:73:98:3f", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e6afe42-27", "ovs_interfaceid": "5e6afe42-2743-40f8-8491-2b441697f6aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.522732] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654085, 'name': CreateVM_Task, 'duration_secs': 0.336374} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.522884] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 709.523503] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.564371] env[62974]: DEBUG nova.compute.utils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.570451] env[62974]: DEBUG nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 709.571146] env[62974]: DEBUG nova.network.neutron [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 709.625438] env[62974]: DEBUG nova.policy [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37c0028b341743dca487b9fa9351a664', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6670a9c979e41bb9d626141bb979ad9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 709.659611] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "12c769fb-8c9e-4089-9563-232cfad89b21" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.659840] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "12c769fb-8c9e-4089-9563-232cfad89b21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.699788] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529629af-ca75-90d5-7825-0a2e5c493d66, 'name': SearchDatastore_Task, 'duration_secs': 0.016513} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.700143] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.700411] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.700637] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.700782] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.701092] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.701241] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.701531] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.701757] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa7352f1-9011-4fc5-9595-301afd3de79a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.703662] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f158d4-a6d8-4ca1-b000-8b7815eec1ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.712884] env[62974]: DEBUG oslo_vmware.api [None 
req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 709.712884] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526f57d3-4112-31d7-8246-ec75986e29ea" [ 709.712884] env[62974]: _type = "Task" [ 709.712884] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.716939] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.716992] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 709.717974] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2cb3323-d1cc-4847-8694-a933aa58206b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.724950] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526f57d3-4112-31d7-8246-ec75986e29ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.726552] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 709.726552] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5230cfb8-6ef2-3a97-0702-c69b85c3c794" [ 709.726552] env[62974]: _type = "Task" [ 709.726552] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.734611] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5230cfb8-6ef2-3a97-0702-c69b85c3c794, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.857497] env[62974]: DEBUG nova.compute.manager [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Received event network-vif-plugged-8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 709.857734] env[62974]: DEBUG oslo_concurrency.lockutils [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] Acquiring lock "a14e7e40-afef-4607-8fa9-935a92ea49dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.857950] env[62974]: DEBUG oslo_concurrency.lockutils [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.858137] env[62974]: DEBUG oslo_concurrency.lockutils [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.858304] env[62974]: DEBUG nova.compute.manager [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] No waiting events found dispatching network-vif-plugged-8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.858468] env[62974]: WARNING nova.compute.manager [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Received unexpected event network-vif-plugged-8ad12b0d-168c-4485-b856-6649ee5fe3a4 for instance with vm_state building and task_state spawning. [ 709.858664] env[62974]: DEBUG nova.compute.manager [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Received event network-changed-8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 709.858823] env[62974]: DEBUG nova.compute.manager [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Refreshing instance network info cache due to event network-changed-8ad12b0d-168c-4485-b856-6649ee5fe3a4. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 709.858984] env[62974]: DEBUG oslo_concurrency.lockutils [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] Acquiring lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.859155] env[62974]: DEBUG oslo_concurrency.lockutils [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] Acquired lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.859314] env[62974]: DEBUG nova.network.neutron [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Refreshing network info cache for port 8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.869418] env[62974]: INFO nova.compute.manager [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Took 59.01 seconds to build instance. [ 710.001410] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Acquiring lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.012663] env[62974]: DEBUG oslo_concurrency.lockutils [req-07a74b6a-b9bd-4448-ab3e-5d5efd942775 req-1c69e20e-035c-49af-b9ee-8b3619481907 service nova] Releasing lock "refresh_cache-da43a464-ebae-4038-9f7b-330df22d8d7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.015707] env[62974]: DEBUG nova.network.neutron [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Successfully created port: 1d12bec7-b3ee-4922-b371-3279eb60ac8f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.070216] env[62974]: DEBUG nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 710.074379] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0e0218f3-a27a-4920-9ddb-4ddf6dd9c765 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.075469] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.556s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.078550] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.078824] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.138s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.079155] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.080964] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.657s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.081225] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.083828] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.490s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.084699] env[62974]: INFO nova.compute.claims 
[None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.112733] env[62974]: INFO nova.scheduler.client.report [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleted allocations for instance 28c247f6-3179-425d-ae1c-615151b1e2ff [ 710.115638] env[62974]: INFO nova.scheduler.client.report [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Deleted allocations for instance 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc [ 710.134391] env[62974]: INFO nova.scheduler.client.report [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Deleted allocations for instance a63aa120-1c7b-4abc-93cf-4d138f5cebde [ 710.157175] env[62974]: DEBUG nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 710.157762] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 710.158008] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.158229] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 710.158380] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.158891] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 710.158891] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 710.158964] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 710.159397] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 710.160030] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 710.160240] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 710.161149] env[62974]: DEBUG nova.virt.hardware [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 710.161980] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdefc61f-449c-488e-a83e-17323994a04d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.173621] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c68ea6d-462f-4ec0-bf6a-26f65e0313e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.222633] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526f57d3-4112-31d7-8246-ec75986e29ea, 'name': SearchDatastore_Task, 'duration_secs': 0.016265} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.222914] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.223161] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 710.223357] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.236618] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5230cfb8-6ef2-3a97-0702-c69b85c3c794, 'name': SearchDatastore_Task, 'duration_secs': 0.014137} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.237444] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f26dc98-5bf5-47d3-8ca4-f14f79326a08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.242886] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 710.242886] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f87b6e-09b1-c09b-7c6d-65dc23399faa" [ 710.242886] env[62974]: _type = "Task" [ 710.242886] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.251958] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f87b6e-09b1-c09b-7c6d-65dc23399faa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.371216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8331ccb4-592b-4919-9b14-ca1e98524ec7 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.051s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.394273] env[62974]: DEBUG nova.network.neutron [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.485091] env[62974]: DEBUG nova.network.neutron [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.624850] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e39d0778-2fd5-41bc-9a8a-8928f263a935 tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "28c247f6-3179-425d-ae1c-615151b1e2ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.888s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.627092] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5ba1a4b-6d33-4e30-9793-ffaf21957c2f tempest-ServersTestManualDisk-1453380906 tempest-ServersTestManualDisk-1453380906-project-member] Lock "7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.408s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.648828] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89e79943-0cc6-46a2-92fb-2c7f15e8f64a tempest-FloatingIPsAssociationTestJSON-676402080 tempest-FloatingIPsAssociationTestJSON-676402080-project-member] Lock "a63aa120-1c7b-4abc-93cf-4d138f5cebde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.663s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.755846] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f87b6e-09b1-c09b-7c6d-65dc23399faa, 'name': SearchDatastore_Task, 'duration_secs': 0.025119} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.755846] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.756215] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] da43a464-ebae-4038-9f7b-330df22d8d7c/da43a464-ebae-4038-9f7b-330df22d8d7c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 710.756870] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.756870] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.756870] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c7069ba-c6b8-4969-a17f-b8ff04d25cb9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.759412] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0243aed7-0e93-4312-b3cc-d128a84033d4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.768215] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 710.768215] env[62974]: value = "task-2654086" [ 710.768215] env[62974]: _type = "Task" [ 710.768215] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.773424] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.773626] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 710.774758] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55ef94d2-514d-4f62-b8db-3d2cec80f074 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.780637] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654086, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.783659] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 710.783659] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d0757d-5b1d-ade4-caa5-bde61a078627" [ 710.783659] env[62974]: _type = "Task" [ 710.783659] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.792529] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d0757d-5b1d-ade4-caa5-bde61a078627, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.874134] env[62974]: DEBUG nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 710.987804] env[62974]: DEBUG oslo_concurrency.lockutils [req-8671300b-f466-4f93-a25b-4054cca0f08c req-adc6e8c6-9d9c-4bb3-91f1-76bc049fb01e service nova] Releasing lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.988356] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Acquired lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.988629] env[62974]: DEBUG nova.network.neutron [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.084046] env[62974]: DEBUG nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 711.111900] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 711.113160] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.113160] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 711.113160] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.113160] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 711.113160] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 711.113401] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 711.113401] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 711.113401] env[62974]: DEBUG 
nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 711.113523] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 711.113698] env[62974]: DEBUG nova.virt.hardware [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 711.114626] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50c3f50-1060-4233-bac7-e46a2fa12ff3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.138361] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a78d96-8719-4668-83f4-75baa3423031 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.283960] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654086, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.294505] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d0757d-5b1d-ade4-caa5-bde61a078627, 'name': SearchDatastore_Task, 'duration_secs': 0.01063} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.299303] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca627bbc-0341-42f3-ac0c-d933cbe57695 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.309257] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 711.309257] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d22b9-e102-0a17-a113-1c31111d46b8" [ 711.309257] env[62974]: _type = "Task" [ 711.309257] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.321377] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d22b9-e102-0a17-a113-1c31111d46b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.394765] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.520244] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "5bc466fb-eebb-40b1-ba09-614a25782ecd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.520500] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "5bc466fb-eebb-40b1-ba09-614a25782ecd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.520705] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "5bc466fb-eebb-40b1-ba09-614a25782ecd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.521077] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "5bc466fb-eebb-40b1-ba09-614a25782ecd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.521281] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "5bc466fb-eebb-40b1-ba09-614a25782ecd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.523542] env[62974]: INFO nova.compute.manager [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Terminating instance [ 711.566374] env[62974]: DEBUG nova.network.neutron [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.581746] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b566a5-d802-44de-86bc-2e86ac63a8a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.589008] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e858d39-5456-4a82-97dc-7d746ccfc574 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.622578] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b75e5cb-3806-445a-9406-582b086aff09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.636016] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa36c46-fa47-4619-93ae-6e145d51ed76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.638044] env[62974]: DEBUG nova.compute.manager [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 711.650848] env[62974]: DEBUG nova.compute.provider_tree [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.746618] env[62974]: DEBUG nova.compute.manager [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.747583] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd3bf15-ac91-4733-b7f9-38920b6ace6d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.780959] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654086, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615184} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.782420] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] da43a464-ebae-4038-9f7b-330df22d8d7c/da43a464-ebae-4038-9f7b-330df22d8d7c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 711.782420] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.782420] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f9bc560-4ea7-4824-a44e-7b7d5ebafc54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.789544] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 711.789544] env[62974]: value = "task-2654087" [ 711.789544] env[62974]: _type = "Task" [ 711.789544] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.799016] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654087, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.818897] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d22b9-e102-0a17-a113-1c31111d46b8, 'name': SearchDatastore_Task, 'duration_secs': 0.036069} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.819187] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.819451] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.819710] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-370cff85-6006-4b63-b99c-0bfb5f081428 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.827770] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 711.827770] env[62974]: value = "task-2654088" [ 711.827770] env[62974]: _type = "Task" [ 711.827770] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.837432] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654088, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.937915] env[62974]: DEBUG nova.compute.manager [req-bb205049-26b4-4bcb-b488-fafbb6578da9 req-e18adfed-2ad7-4a83-81f5-da6adc898a64 service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Received event network-vif-plugged-1d12bec7-b3ee-4922-b371-3279eb60ac8f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 711.940092] env[62974]: DEBUG oslo_concurrency.lockutils [req-bb205049-26b4-4bcb-b488-fafbb6578da9 req-e18adfed-2ad7-4a83-81f5-da6adc898a64 service nova] Acquiring lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.940092] env[62974]: DEBUG oslo_concurrency.lockutils [req-bb205049-26b4-4bcb-b488-fafbb6578da9 req-e18adfed-2ad7-4a83-81f5-da6adc898a64 service nova] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.940092] env[62974]: DEBUG oslo_concurrency.lockutils [req-bb205049-26b4-4bcb-b488-fafbb6578da9 req-e18adfed-2ad7-4a83-81f5-da6adc898a64 service nova] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.940092] env[62974]: DEBUG nova.compute.manager [req-bb205049-26b4-4bcb-b488-fafbb6578da9 req-e18adfed-2ad7-4a83-81f5-da6adc898a64 service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] No waiting events found dispatching network-vif-plugged-1d12bec7-b3ee-4922-b371-3279eb60ac8f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 711.940092] env[62974]: WARNING nova.compute.manager [req-bb205049-26b4-4bcb-b488-fafbb6578da9 req-e18adfed-2ad7-4a83-81f5-da6adc898a64 service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Received unexpected event network-vif-plugged-1d12bec7-b3ee-4922-b371-3279eb60ac8f for instance with vm_state building and task_state spawning. 
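The SearchDatastore_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same wait_for_task pattern: a vCenter task object is created, then polled until it reports success, producing the intermediate "progress is N%" lines and the final "completed successfully" line. A simplified sketch of such a polling loop is shown below; poll_task_info() is a hypothetical stand-in for the property read the real client performs on the task, not an actual oslo.vmware call.

import time


def wait_for_vcenter_task(poll_task_info, task_ref, interval=0.5):
    # Illustrative polling loop; poll_task_info is assumed to return a dict
    # such as {'state': 'running', 'progress': 51} for the given task ref.
    while True:
        info = poll_task_info(task_ref)
        if info["state"] == "success":
            return info                      # "completed successfully" above
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "vCenter task failed"))
        time.sleep(interval)                 # log "progress is N%" and poll again
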
[ 711.971065] env[62974]: DEBUG nova.network.neutron [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Updating instance_info_cache with network_info: [{"id": "8ad12b0d-168c-4485-b856-6649ee5fe3a4", "address": "fa:16:3e:ee:06:e5", "network": {"id": "c1b62dd4-5cd6-4af4-985f-91c4abc653a7", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1678520260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a368a28171f74897b6d3918fe7915b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ad12b0d-16", "ovs_interfaceid": "8ad12b0d-168c-4485-b856-6649ee5fe3a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.029152] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "refresh_cache-5bc466fb-eebb-40b1-ba09-614a25782ecd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.029391] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquired lock "refresh_cache-5bc466fb-eebb-40b1-ba09-614a25782ecd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.029572] env[62974]: DEBUG nova.network.neutron [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.090392] env[62974]: DEBUG nova.network.neutron [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Successfully updated port: 1d12bec7-b3ee-4922-b371-3279eb60ac8f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 712.157195] env[62974]: DEBUG nova.scheduler.client.report [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.162092] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.260379] env[62974]: INFO nova.compute.manager [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] instance snapshotting [ 712.260663] env[62974]: WARNING nova.compute.manager [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 712.263673] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85c1992-bab3-487d-9dcf-67f82072c72e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.297030] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d56ccd6-6dc7-4705-b9a6-19173ed4d936 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.306338] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654087, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067605} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.309062] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.313117] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924f67ee-3b8b-4cb2-bae1-c6cdabee55c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.338230] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] da43a464-ebae-4038-9f7b-330df22d8d7c/da43a464-ebae-4038-9f7b-330df22d8d7c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.343442] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c74c6cfd-98b5-4f7b-bdee-26c6266eacd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.363451] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654088, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.364839] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 712.364839] env[62974]: value = "task-2654089" [ 712.364839] env[62974]: _type = "Task" [ 712.364839] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.375039] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654089, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.477015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Releasing lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.477015] env[62974]: DEBUG nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Instance network_info: |[{"id": "8ad12b0d-168c-4485-b856-6649ee5fe3a4", "address": "fa:16:3e:ee:06:e5", "network": {"id": "c1b62dd4-5cd6-4af4-985f-91c4abc653a7", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1678520260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a368a28171f74897b6d3918fe7915b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ad12b0d-16", "ovs_interfaceid": "8ad12b0d-168c-4485-b856-6649ee5fe3a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 712.477231] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:06:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ad12b0d-168c-4485-b856-6649ee5fe3a4', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 712.483480] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Creating folder: Project (a368a28171f74897b6d3918fe7915b4e). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.483952] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b06e3d79-18e2-473e-bfc7-0192bfd29847 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.498350] env[62974]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
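The Folder.CreateFolder call above can race with another build creating the same project folder; vCenter then answers with a DuplicateName fault (which suds reports as an internal server error despite the HTTP 200, hence the warning), and the caller simply treats the folder as already existing and continues, as the next entries show. A minimal sketch of that idempotent-create pattern, with DuplicateName defined locally as a stand-in exception and create_folder as a hypothetical wrapper around the SOAP call:

class DuplicateName(Exception):
    # Stand-in for the vCenter DuplicateName fault (illustrative only).
    pass


def ensure_folder(create_folder, parent_ref, name):
    # create_folder is a hypothetical callable wrapping Folder.CreateFolder.
    try:
        return create_folder(parent_ref, name)
    except DuplicateName:
        # Another request already created the folder; treat it as a no-op.
        return None
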
[ 712.498714] env[62974]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62974) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 712.499211] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Folder already exists: Project (a368a28171f74897b6d3918fe7915b4e). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 712.499527] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Creating folder: Instances. Parent ref: group-v535265. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.499879] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f048201-44e4-469a-8831-26ee039b6492 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.511172] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Created folder: Instances in parent group-v535265. [ 712.511172] env[62974]: DEBUG oslo.service.loopingcall [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.511172] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 712.511172] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff3fc760-5c75-4a81-954a-3515cd543ba0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.530238] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 712.530238] env[62974]: value = "task-2654092" [ 712.530238] env[62974]: _type = "Task" [ 712.530238] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.540033] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654092, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.571689] env[62974]: DEBUG nova.network.neutron [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.595944] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "refresh_cache-8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.595944] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired lock "refresh_cache-8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.595944] env[62974]: DEBUG nova.network.neutron [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.663053] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.663053] env[62974]: DEBUG nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 712.663862] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.144s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.664222] env[62974]: DEBUG nova.objects.instance [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lazy-loading 'resources' on Instance uuid 05742180-08db-45db-9ee0-e359aa8af2f0 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 712.699656] env[62974]: DEBUG nova.network.neutron [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.819025] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 712.819025] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-00a44399-0755-498f-b7b0-37cf4b339055 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.825266] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 712.825266] env[62974]: value = "task-2654093" [ 712.825266] env[62974]: _type = "Task" [ 712.825266] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.835695] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654093, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.841077] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563153} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.841480] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.841853] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.842204] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-978bc1d7-8ce0-4ac3-b12f-1f0b83d9a76a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.849958] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 712.849958] env[62974]: value = "task-2654094" [ 712.849958] env[62974]: _type = "Task" [ 712.849958] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.857259] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654094, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.874682] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654089, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.043432] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654092, 'name': CreateVM_Task, 'duration_secs': 0.36029} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.043809] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 713.045652] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'guest_format': None, 'device_type': None, 'boot_index': 0, 'attachment_id': '918be264-c3ad-419f-aec8-cc45e7697967', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535277', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'name': 'volume-580d4492-2e68-4792-86d6-404ee3e08942', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a14e7e40-afef-4607-8fa9-935a92ea49dc', 'attached_at': '', 'detached_at': '', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'serial': '580d4492-2e68-4792-86d6-404ee3e08942'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62974) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 713.046289] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Root volume attach. Driver type: vmdk {{(pid=62974) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 713.048028] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4c2674-b873-4685-86c9-206ff0846f6e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.057484] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc5a9c1-2d3b-4c4c-ad47-13886d429bcd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.065757] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1578c6-12bd-4fa5-a7bc-c3a6280dc10a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.075018] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b1e05e50-3510-45ac-8b35-8eb64ccd61f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.083224] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 713.083224] env[62974]: value = "task-2654095" [ 713.083224] env[62974]: _type = "Task" [ 713.083224] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.092935] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.130567] env[62974]: DEBUG nova.network.neutron [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.169266] env[62974]: DEBUG nova.compute.utils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 713.175664] env[62974]: DEBUG nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 713.175921] env[62974]: DEBUG nova.network.neutron [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.201238] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Releasing lock "refresh_cache-5bc466fb-eebb-40b1-ba09-614a25782ecd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.201674] env[62974]: DEBUG nova.compute.manager [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 713.201930] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.207299] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1260bb-731a-4f82-9965-2739f1b5fa56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.220347] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.220613] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ba97298-ee77-4bde-9022-1fd88c600af1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.227260] env[62974]: DEBUG oslo_vmware.api [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 713.227260] env[62974]: value = "task-2654096" [ 713.227260] env[62974]: _type = "Task" [ 713.227260] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.235608] env[62974]: DEBUG oslo_vmware.api [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654096, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.285985] env[62974]: DEBUG nova.policy [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37c0028b341743dca487b9fa9351a664', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6670a9c979e41bb9d626141bb979ad9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 713.337554] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654093, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.359847] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069568} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.359847] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.359847] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca17d07f-753a-4f27-b37b-c24d9d07118d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.367389] env[62974]: DEBUG nova.network.neutron [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Updating instance_info_cache with network_info: [{"id": "1d12bec7-b3ee-4922-b371-3279eb60ac8f", "address": "fa:16:3e:85:6c:51", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d12bec7-b3", "ovs_interfaceid": "1d12bec7-b3ee-4922-b371-3279eb60ac8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.395118] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.399396] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92d818a0-a487-4a00-b84b-026b33257dc4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.420423] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654089, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.426381] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 713.426381] env[62974]: value = "task-2654097" [ 713.426381] env[62974]: _type = "Task" [ 713.426381] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.436879] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654097, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.602604] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 34%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.657437] env[62974]: DEBUG nova.network.neutron [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Successfully created port: 5ab4b6f6-7ea8-4151-875b-adaed16c006b {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 713.676248] env[62974]: DEBUG nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 713.749197] env[62974]: DEBUG oslo_vmware.api [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654096, 'name': PowerOffVM_Task, 'duration_secs': 0.214689} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.749786] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.750252] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.750854] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e18877e9-9675-42ee-9531-b1ea71bc83f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.781515] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.781997] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.782942] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleting the datastore file [datastore2] 5bc466fb-eebb-40b1-ba09-614a25782ecd {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.783622] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-871cfa36-dc0d-4da1-a7cb-fd25bded2ebb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.791802] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8af63b-2051-45b1-9fa9-f28e0fa5aa57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.797910] env[62974]: DEBUG oslo_vmware.api [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for the task: (returnval){ [ 713.797910] env[62974]: value = "task-2654099" [ 713.797910] env[62974]: _type = "Task" [ 713.797910] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.806253] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c07dd3-ada8-46dc-ba5e-f2530d5f67a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.815121] env[62974]: DEBUG oslo_vmware.api [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.853573] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b064482-fd6e-4ab8-98c3-b261796c7a7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.863759] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654093, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.867457] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2aec8e1-8cbb-4dd2-865b-ed9ca3b39932 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.877992] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Releasing lock "refresh_cache-8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.879392] env[62974]: DEBUG nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Instance network_info: |[{"id": "1d12bec7-b3ee-4922-b371-3279eb60ac8f", "address": "fa:16:3e:85:6c:51", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d12bec7-b3", "ovs_interfaceid": "1d12bec7-b3ee-4922-b371-3279eb60ac8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 713.880236] env[62974]: DEBUG 
nova.virt.vmwareapi.vmops [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:6c:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d12bec7-b3ee-4922-b371-3279eb60ac8f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 713.889056] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Creating folder: Project (a6670a9c979e41bb9d626141bb979ad9). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.899733] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea2f3b62-0147-4bc8-b3d3-75d654eed599 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.901234] env[62974]: DEBUG nova.compute.provider_tree [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.906781] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654089, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.919773] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Created folder: Project (a6670a9c979e41bb9d626141bb979ad9) in parent group-v535199. [ 713.920110] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Creating folder: Instances. Parent ref: group-v535323. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.920443] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39a57f28-ad14-460d-af39-dec8d549385f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.937468] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Created folder: Instances in parent group-v535323. [ 713.937905] env[62974]: DEBUG oslo.service.loopingcall [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.938706] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 713.938931] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e7144f0-9491-456e-88a2-87511f9a4e79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.959174] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654097, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.966130] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 713.966130] env[62974]: value = "task-2654102" [ 713.966130] env[62974]: _type = "Task" [ 713.966130] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.978130] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654102, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.097701] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 49%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.129651] env[62974]: DEBUG nova.compute.manager [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Received event network-changed-1d12bec7-b3ee-4922-b371-3279eb60ac8f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 714.129919] env[62974]: DEBUG nova.compute.manager [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Refreshing instance network info cache due to event network-changed-1d12bec7-b3ee-4922-b371-3279eb60ac8f. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 714.135330] env[62974]: DEBUG oslo_concurrency.lockutils [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] Acquiring lock "refresh_cache-8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.135330] env[62974]: DEBUG oslo_concurrency.lockutils [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] Acquired lock "refresh_cache-8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.135330] env[62974]: DEBUG nova.network.neutron [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Refreshing network info cache for port 1d12bec7-b3ee-4922-b371-3279eb60ac8f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 714.317022] env[62974]: DEBUG oslo_vmware.api [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Task: {'id': task-2654099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177075} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.317400] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.317643] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.317870] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.318072] env[62974]: INFO nova.compute.manager [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Took 1.12 seconds to destroy the instance on the hypervisor. [ 714.318336] env[62974]: DEBUG oslo.service.loopingcall [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.318539] env[62974]: DEBUG nova.compute.manager [-] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 714.318633] env[62974]: DEBUG nova.network.neutron [-] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.343602] env[62974]: DEBUG nova.network.neutron [-] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.364640] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654093, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.383490] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654089, 'name': ReconfigVM_Task, 'duration_secs': 1.827428} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.383824] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Reconfigured VM instance instance-0000002a to attach disk [datastore2] da43a464-ebae-4038-9f7b-330df22d8d7c/da43a464-ebae-4038-9f7b-330df22d8d7c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.384520] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87d8aa5c-3cca-42d7-8b1f-0dbe8237da83 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.393855] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 714.393855] env[62974]: value = "task-2654103" [ 714.393855] env[62974]: _type = "Task" [ 714.393855] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.407385] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654103, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.408366] env[62974]: DEBUG nova.scheduler.client.report [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 714.441636] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654097, 'name': ReconfigVM_Task, 'duration_secs': 0.609878} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.441928] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 669cd72c-556f-40b6-8bc2-f50a125c182a/669cd72c-556f-40b6-8bc2-f50a125c182a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.442752] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da36b715-7521-45e3-8ece-9ed3debcac3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.451246] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 714.451246] env[62974]: value = "task-2654104" [ 714.451246] env[62974]: _type = "Task" [ 714.451246] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.462432] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654104, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.478687] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654102, 'name': CreateVM_Task, 'duration_secs': 0.453012} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.478687] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 714.479226] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.479354] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.479632] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 714.479923] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4427c14-176b-4923-8adc-6e5088fbbb58 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.487477] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 714.487477] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b37b2-320a-873f-4346-ff270218cf18" [ 714.487477] env[62974]: _type = "Task" [ 714.487477] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.500832] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b37b2-320a-873f-4346-ff270218cf18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.603688] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 62%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.690020] env[62974]: DEBUG nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 714.730955] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 714.731247] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.731408] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 714.731587] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.731734] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 714.731880] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 714.732261] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 714.732469] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 714.732650] env[62974]: DEBUG 
nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 714.732847] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 714.733060] env[62974]: DEBUG nova.virt.hardware [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 714.734085] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4098f19-af30-49f3-83af-65a2e2de264b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.746021] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc38372-4e9e-4181-a120-d9843e9107c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.846705] env[62974]: DEBUG nova.network.neutron [-] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.866905] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654093, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.906732] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654103, 'name': Rename_Task, 'duration_secs': 0.231916} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.907897] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.907897] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66107340-c942-4009-a0f3-f4f1165c6727 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.914081] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.250s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.918023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.572s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.919496] env[62974]: INFO nova.compute.claims [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.924133] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 714.924133] env[62974]: value = "task-2654105" [ 714.924133] env[62974]: _type = "Task" [ 714.924133] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.941381] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654105, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.959272] env[62974]: INFO nova.scheduler.client.report [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Deleted allocations for instance 05742180-08db-45db-9ee0-e359aa8af2f0 [ 714.965941] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654104, 'name': Rename_Task, 'duration_secs': 0.191004} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.969152] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.969353] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43953b42-77a5-4b6d-be8e-b10c7c55affe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.980437] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 714.980437] env[62974]: value = "task-2654106" [ 714.980437] env[62974]: _type = "Task" [ 714.980437] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.990024] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654106, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.006085] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b37b2-320a-873f-4346-ff270218cf18, 'name': SearchDatastore_Task, 'duration_secs': 0.014106} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.006455] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.006811] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.006969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.007122] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.007302] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.007629] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c60c48d0-c7f7-46e1-82cd-ac881756d77a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.021566] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.021566] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.022350] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df316ba0-0345-41b5-a593-cd69450ed013 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.029390] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 715.029390] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528d0057-8f4a-fbc2-3a60-b7f8ee6a9c6a" [ 715.029390] env[62974]: _type = "Task" [ 715.029390] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.033875] env[62974]: DEBUG nova.network.neutron [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Updated VIF entry in instance network info cache for port 1d12bec7-b3ee-4922-b371-3279eb60ac8f. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 715.034420] env[62974]: DEBUG nova.network.neutron [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Updating instance_info_cache with network_info: [{"id": "1d12bec7-b3ee-4922-b371-3279eb60ac8f", "address": "fa:16:3e:85:6c:51", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d12bec7-b3", "ovs_interfaceid": "1d12bec7-b3ee-4922-b371-3279eb60ac8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.043991] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528d0057-8f4a-fbc2-3a60-b7f8ee6a9c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.098315] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 76%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.349738] env[62974]: INFO nova.compute.manager [-] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Took 1.03 seconds to deallocate network for instance. [ 715.370512] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654093, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.446780] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654105, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.458718] env[62974]: DEBUG nova.network.neutron [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Successfully updated port: 5ab4b6f6-7ea8-4151-875b-adaed16c006b {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 715.468074] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fc38a5a-1da0-4edc-8b34-b3f9fffbafd6 tempest-ServersV294TestFqdnHostnames-841949360 tempest-ServersV294TestFqdnHostnames-841949360-project-member] Lock "05742180-08db-45db-9ee0-e359aa8af2f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.601s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.493738] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654106, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.545043] env[62974]: DEBUG oslo_concurrency.lockutils [req-822030b6-0690-4988-8d30-f3a8fd15f757 req-fac4b6f5-7832-47ba-86c2-c2b93bef8c6a service nova] Releasing lock "refresh_cache-8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.545043] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528d0057-8f4a-fbc2-3a60-b7f8ee6a9c6a, 'name': SearchDatastore_Task, 'duration_secs': 0.022057} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.545043] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c815e6e-be4d-4109-8558-dc29ed26f5ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.555077] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 715.555077] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d1b90-bb1c-5ff4-a789-aa705ba31413" [ 715.555077] env[62974]: _type = "Task" [ 715.555077] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.568423] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d1b90-bb1c-5ff4-a789-aa705ba31413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.601209] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.863588] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.870871] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654093, 'name': CreateSnapshot_Task, 'duration_secs': 3.005239} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.870871] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 715.871730] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c311cd6-2011-473a-ab41-15ced7d394d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.953078] env[62974]: DEBUG oslo_vmware.api [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654105, 'name': PowerOnVM_Task, 'duration_secs': 0.817765} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.953078] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.953078] env[62974]: INFO nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Took 10.35 seconds to spawn the instance on the hypervisor. [ 715.953078] env[62974]: DEBUG nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 715.953078] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab37c696-00c1-4275-b52d-15ff2b6570b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.965592] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "refresh_cache-0c2642d5-85fe-4db5-9891-025c88ca8c7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.965749] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired lock "refresh_cache-0c2642d5-85fe-4db5-9891-025c88ca8c7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.965901] env[62974]: DEBUG nova.network.neutron [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 716.003420] env[62974]: DEBUG oslo_vmware.api [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654106, 'name': PowerOnVM_Task, 'duration_secs': 0.635952} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.007630] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 716.007718] env[62974]: DEBUG nova.compute.manager [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.009584] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9470c8-0de6-451f-8fe7-19ca9cfc2ef2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.065397] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d1b90-bb1c-5ff4-a789-aa705ba31413, 'name': SearchDatastore_Task, 'duration_secs': 0.023519} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.065487] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.065849] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7/8bd478ab-a101-4d6a-9e7c-bfde0fce81c7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.068692] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b881a46-b016-4fa2-9f84-f122de7ab5c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.076771] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 716.076771] env[62974]: value = "task-2654107" [ 716.076771] env[62974]: _type = "Task" [ 716.076771] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.088260] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654107, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.096582] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 97%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.298554] env[62974]: DEBUG nova.compute.manager [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Received event network-vif-plugged-5ab4b6f6-7ea8-4151-875b-adaed16c006b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 716.298789] env[62974]: DEBUG oslo_concurrency.lockutils [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] Acquiring lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.299057] env[62974]: DEBUG oslo_concurrency.lockutils [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.299247] env[62974]: DEBUG oslo_concurrency.lockutils [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.299417] env[62974]: DEBUG nova.compute.manager [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] No waiting events found dispatching network-vif-plugged-5ab4b6f6-7ea8-4151-875b-adaed16c006b {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 716.300089] env[62974]: WARNING nova.compute.manager [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Received unexpected event network-vif-plugged-5ab4b6f6-7ea8-4151-875b-adaed16c006b for instance with vm_state building and task_state spawning. [ 716.300089] env[62974]: DEBUG nova.compute.manager [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Received event network-changed-5ab4b6f6-7ea8-4151-875b-adaed16c006b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 716.300089] env[62974]: DEBUG nova.compute.manager [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Refreshing instance network info cache due to event network-changed-5ab4b6f6-7ea8-4151-875b-adaed16c006b. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 716.300235] env[62974]: DEBUG oslo_concurrency.lockutils [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] Acquiring lock "refresh_cache-0c2642d5-85fe-4db5-9891-025c88ca8c7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.396019] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 716.396019] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e2cd7056-5e3b-42a3-bec9-cfd211634d54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.405338] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 716.405338] env[62974]: value = "task-2654108" [ 716.405338] env[62974]: _type = "Task" [ 716.405338] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.417423] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654108, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.433239] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690af2a0-7e73-4b1a-b7f8-f26054081722 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.442579] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa84f742-0fad-471d-a36f-ec2f026ff373 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.486817] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8449c21f-daf6-4f90-bbf4-f0425a86a078 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.488611] env[62974]: INFO nova.compute.manager [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Took 55.67 seconds to build instance. 
[ 716.495965] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefdc57a-04ad-4090-bcba-f6237438ac15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.512880] env[62974]: DEBUG nova.compute.provider_tree [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.530558] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.570968] env[62974]: DEBUG nova.network.neutron [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.586391] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.603412] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.017038] env[62974]: DEBUG nova.network.neutron [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Updating instance_info_cache with network_info: [{"id": "5ab4b6f6-7ea8-4151-875b-adaed16c006b", "address": "fa:16:3e:9c:f3:c6", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ab4b6f6-7e", "ovs_interfaceid": "5ab4b6f6-7ea8-4151-875b-adaed16c006b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.023023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-15c590d7-a752-4c40-b112-e8b3251643ac tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.262s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.023023] env[62974]: DEBUG nova.scheduler.client.report [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.045068] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654108, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.094707] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654107, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.108346] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task} progress is 98%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.532822] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Releasing lock "refresh_cache-0c2642d5-85fe-4db5-9891-025c88ca8c7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.532822] env[62974]: DEBUG nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Instance network_info: |[{"id": "5ab4b6f6-7ea8-4151-875b-adaed16c006b", "address": "fa:16:3e:9c:f3:c6", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ab4b6f6-7e", "ovs_interfaceid": "5ab4b6f6-7ea8-4151-875b-adaed16c006b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 717.533160] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654108, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.533160] env[62974]: DEBUG oslo_concurrency.lockutils [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] Acquired lock "refresh_cache-0c2642d5-85fe-4db5-9891-025c88ca8c7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.533160] env[62974]: DEBUG nova.network.neutron [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Refreshing network info cache for port 5ab4b6f6-7ea8-4151-875b-adaed16c006b {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 717.534892] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:f3:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ab4b6f6-7ea8-4151-875b-adaed16c006b', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.546136] env[62974]: DEBUG oslo.service.loopingcall [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.547115] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.547594] env[62974]: DEBUG nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 717.550138] env[62974]: DEBUG nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 717.552281] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 717.555398] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.350s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.555398] env[62974]: DEBUG nova.objects.instance [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lazy-loading 'resources' on Instance uuid 69597c3f-ccb2-474d-bb7c-629c5da0b456 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 717.555398] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a517cf38-e012-4fb5-b157-b8af8d190ca8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.575896] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.575896] env[62974]: value = "task-2654109" [ 717.575896] env[62974]: _type = "Task" [ 717.575896] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.587700] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654109, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.592233] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654107, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.052529} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.592498] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7/8bd478ab-a101-4d6a-9e7c-bfde0fce81c7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 717.592734] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 717.592934] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94e62ffe-28df-4326-9a6c-c8f80e06be21 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.603718] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654095, 'name': RelocateVM_Task, 'duration_secs': 4.357149} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.605249] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Volume attach. Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 717.605357] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535277', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'name': 'volume-580d4492-2e68-4792-86d6-404ee3e08942', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a14e7e40-afef-4607-8fa9-935a92ea49dc', 'attached_at': '', 'detached_at': '', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'serial': '580d4492-2e68-4792-86d6-404ee3e08942'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 717.605601] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 717.605601] env[62974]: value = "task-2654110" [ 717.605601] env[62974]: _type = "Task" [ 717.605601] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.606354] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12656aba-f2ee-4204-bd6c-1872aabc47e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.617122] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654110, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.628402] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02843410-9445-458d-833f-4c3e7ae26625 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.654229] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] volume-580d4492-2e68-4792-86d6-404ee3e08942/volume-580d4492-2e68-4792-86d6-404ee3e08942.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.654515] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1385d732-ddc7-446b-be11-d3db69e8dc56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.675503] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 717.675503] env[62974]: value = "task-2654111" [ 717.675503] env[62974]: _type = "Task" [ 717.675503] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.683533] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654111, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.029377] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654108, 'name': CloneVM_Task, 'duration_secs': 1.511102} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.029651] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Created linked-clone VM from snapshot [ 718.030429] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c27a7e0-7834-4b9b-8ef7-2a64a14ee1af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.042688] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Uploading image a6f12fcd-569e-4086-80cf-7a34fcbb8706 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 718.060575] env[62974]: DEBUG nova.compute.utils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.064696] env[62974]: DEBUG nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.065478] env[62974]: DEBUG nova.network.neutron [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.087698] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.087999] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.094731] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.102485] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654109, 
'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.104633] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 718.104633] env[62974]: value = "vm-535327" [ 718.104633] env[62974]: _type = "VirtualMachine" [ 718.104633] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 718.104902] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ee203f75-c851-440e-a428-2cf0798cb78f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.114702] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease: (returnval){ [ 718.114702] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528f859f-ec54-30d4-4e8a-0bd8a10fec5f" [ 718.114702] env[62974]: _type = "HttpNfcLease" [ 718.114702] env[62974]: } obtained for exporting VM: (result){ [ 718.114702] env[62974]: value = "vm-535327" [ 718.114702] env[62974]: _type = "VirtualMachine" [ 718.114702] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 718.115026] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the lease: (returnval){ [ 718.115026] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528f859f-ec54-30d4-4e8a-0bd8a10fec5f" [ 718.115026] env[62974]: _type = "HttpNfcLease" [ 718.115026] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 718.123198] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654110, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070045} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.124775] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 718.125037] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a1a303-83a2-40a4-8aa8-6e8ec7bc2a3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.134108] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 718.134108] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528f859f-ec54-30d4-4e8a-0bd8a10fec5f" [ 718.134108] env[62974]: _type = "HttpNfcLease" [ 718.134108] env[62974]: } is ready. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 718.142287] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 718.142287] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528f859f-ec54-30d4-4e8a-0bd8a10fec5f" [ 718.142287] env[62974]: _type = "HttpNfcLease" [ 718.142287] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 718.144751] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b52681-730a-48f0-b040-e212616913d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.174815] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7/8bd478ab-a101-4d6a-9e7c-bfde0fce81c7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 718.180123] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e56eecf1-f3aa-4963-beea-3a1560e3c0c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.203184] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b33276-6679-48f8-25b7-4b0b1e805d92/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 718.204477] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b33276-6679-48f8-25b7-4b0b1e805d92/disk-0.vmdk for reading. 
{{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 718.206266] env[62974]: DEBUG nova.policy [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcb0068668124811ab0cd555f828c7df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8152f704e86645a0a7e7e81d9edabf30', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.272741] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 718.272741] env[62974]: value = "task-2654113" [ 718.272741] env[62974]: _type = "Task" [ 718.272741] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.277724] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.288169] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654113, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.323927] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-609a0e4c-bf9f-4851-b945-44f486989cdd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.568345] env[62974]: DEBUG nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 718.616449] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654109, 'name': CreateVM_Task, 'duration_secs': 0.627852} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.616921] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.617749] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.617950] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.618815] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.618815] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3369ad59-5e26-4f12-a714-45aed20cb386 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.626319] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 718.626319] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523491ba-dbc9-ac7f-6832-a5a2935758bc" [ 718.626319] env[62974]: _type = "Task" [ 718.626319] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.646551] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523491ba-dbc9-ac7f-6832-a5a2935758bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.710953] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654111, 'name': ReconfigVM_Task, 'duration_secs': 0.600898} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.714599] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Reconfigured VM instance instance-0000002b to attach disk [datastore2] volume-580d4492-2e68-4792-86d6-404ee3e08942/volume-580d4492-2e68-4792-86d6-404ee3e08942.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.723465] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-866dc1bc-43df-4932-8508-678db950fc7f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.739694] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 718.739694] env[62974]: value = "task-2654114" [ 718.739694] env[62974]: _type = "Task" [ 718.739694] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.753823] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654114, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.791705] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654113, 'name': ReconfigVM_Task, 'duration_secs': 0.430454} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.791969] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7/8bd478ab-a101-4d6a-9e7c-bfde0fce81c7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.793866] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f09a42b-0ae4-4f60-8a02-bbe22b2a683b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.804541] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 718.804541] env[62974]: value = "task-2654115" [ 718.804541] env[62974]: _type = "Task" [ 718.804541] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.818622] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654115, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.900796] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4946a6d-32af-4cf5-8a13-6169580ba4bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.910031] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf1f4e1-8083-4265-8fdc-5a934fff8101 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.947113] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f781ca5e-92d7-4d6f-a687-387e01d6a992 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.955786] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263d16b8-6f1c-410c-8986-860fa4c0d44f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.971399] env[62974]: DEBUG nova.compute.provider_tree [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.974176] env[62974]: DEBUG nova.network.neutron [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Updated VIF entry in instance network info cache for port 5ab4b6f6-7ea8-4151-875b-adaed16c006b. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 718.974989] env[62974]: DEBUG nova.network.neutron [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Updating instance_info_cache with network_info: [{"id": "5ab4b6f6-7ea8-4151-875b-adaed16c006b", "address": "fa:16:3e:9c:f3:c6", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ab4b6f6-7e", "ovs_interfaceid": "5ab4b6f6-7ea8-4151-875b-adaed16c006b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.142258] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "669cd72c-556f-40b6-8bc2-f50a125c182a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.142258] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.142258] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "669cd72c-556f-40b6-8bc2-f50a125c182a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.142616] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.142616] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c 
tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.147855] env[62974]: INFO nova.compute.manager [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Terminating instance [ 719.149939] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523491ba-dbc9-ac7f-6832-a5a2935758bc, 'name': SearchDatastore_Task, 'duration_secs': 0.01388} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.152318] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.152318] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 719.152522] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.152891] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.152891] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 719.153514] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-444396c2-1981-4f50-87c2-9029ea3be822 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.165042] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-58659924-58e6-470e-91db-ddad02186c01 
tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.165449] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 719.166351] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a16c6a8a-1fcb-49db-857a-d26b6dc2646a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.175630] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 719.175630] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb0ac3-cb73-8c91-cb62-e910aa26af98" [ 719.175630] env[62974]: _type = "Task" [ 719.175630] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.189857] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb0ac3-cb73-8c91-cb62-e910aa26af98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.260111] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654114, 'name': ReconfigVM_Task, 'duration_secs': 0.169133} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.261413] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535277', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'name': 'volume-580d4492-2e68-4792-86d6-404ee3e08942', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a14e7e40-afef-4607-8fa9-935a92ea49dc', 'attached_at': '', 'detached_at': '', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'serial': '580d4492-2e68-4792-86d6-404ee3e08942'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 719.261413] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d72057f-194d-43b4-bba3-d9ee491e4bfb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.272357] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 719.272357] env[62974]: value = "task-2654116" [ 719.272357] env[62974]: _type = "Task" [ 719.272357] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.288737] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654116, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.290174] env[62974]: DEBUG nova.network.neutron [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Successfully created port: 0576c111-5b07-4ceb-be4b-78e565bd0313 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.313765] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654115, 'name': Rename_Task, 'duration_secs': 0.190906} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.314074] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.317594] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-115757ce-e208-4c93-b39c-10af3af6725d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.323983] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 719.323983] env[62974]: value = "task-2654117" [ 719.323983] env[62974]: _type = "Task" [ 719.323983] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.334542] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654117, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.479103] env[62974]: DEBUG nova.scheduler.client.report [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.486715] env[62974]: DEBUG oslo_concurrency.lockutils [req-02ce07d9-62d7-4a4f-8614-6f811ae73e2e req-44f44855-194c-4fa7-b7b0-1fc6a3e5db0a service nova] Releasing lock "refresh_cache-0c2642d5-85fe-4db5-9891-025c88ca8c7c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.580219] env[62974]: DEBUG nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 719.618773] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.619159] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.619363] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.619598] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.619791] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.619992] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.620390] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.620477] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 719.621241] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 719.621241] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.621241] env[62974]: DEBUG nova.virt.hardware [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.621902] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c24369-2218-494b-a48c-db811a84c952 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.631745] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1e2947-fa90-42ec-afdd-ee52d6884060 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.665020] env[62974]: DEBUG nova.compute.manager [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 719.665020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.665020] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9a1d7c-41e0-44b3-9dd9-82545e0818ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.675324] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 719.675553] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6f189bd-9a5d-416f-9651-cf1b95fc39f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.692403] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb0ac3-cb73-8c91-cb62-e910aa26af98, 'name': SearchDatastore_Task, 'duration_secs': 0.012898} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.693800] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 719.693800] env[62974]: value = "task-2654118" [ 719.693800] env[62974]: _type = "Task" [ 719.693800] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.694131] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1694720a-afbf-42ab-a847-3cb16f610cbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.707045] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 719.707045] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5222e7b6-727e-4c2b-ba11-356ef72a470e" [ 719.707045] env[62974]: _type = "Task" [ 719.707045] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.707793] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654118, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.715814] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5222e7b6-727e-4c2b-ba11-356ef72a470e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.783848] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654116, 'name': Rename_Task, 'duration_secs': 0.192175} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.783848] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.783848] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e267e601-41fa-4160-a205-4fb3a611c295 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.794530] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 719.794530] env[62974]: value = "task-2654119" [ 719.794530] env[62974]: _type = "Task" [ 719.794530] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.806283] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.834919] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654117, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.929907] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.930248] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.988962] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.435s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.992417] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.569s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.992981] env[62974]: DEBUG nova.objects.instance [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lazy-loading 'resources' on Instance uuid 2ebb3385-4177-4506-a4b0-52b53405cf49 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 720.026478] env[62974]: INFO nova.scheduler.client.report [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Deleted allocations for instance 69597c3f-ccb2-474d-bb7c-629c5da0b456 [ 720.208831] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.218049] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5222e7b6-727e-4c2b-ba11-356ef72a470e, 'name': SearchDatastore_Task, 'duration_secs': 0.013657} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.218391] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.218661] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 0c2642d5-85fe-4db5-9891-025c88ca8c7c/0c2642d5-85fe-4db5-9891-025c88ca8c7c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.218991] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-556d0c35-5246-485f-91e1-6d2488aa5d1e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.228174] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 720.228174] env[62974]: value = "task-2654120" [ 720.228174] env[62974]: _type = "Task" [ 720.228174] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.239596] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654120, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.304895] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654119, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.334919] env[62974]: DEBUG oslo_vmware.api [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654117, 'name': PowerOnVM_Task, 'duration_secs': 0.591508} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.337335] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.337434] env[62974]: INFO nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Took 9.26 seconds to spawn the instance on the hypervisor. [ 720.337646] env[62974]: DEBUG nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.338769] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ce778f-311d-4e27-96fa-8e421b11fc95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.543501] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d27a62b2-0158-4295-9c90-ffb483c9b90d tempest-ServerDiagnosticsV248Test-1487971474 tempest-ServerDiagnosticsV248Test-1487971474-project-member] Lock "69597c3f-ccb2-474d-bb7c-629c5da0b456" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.581s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.714138] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.742378] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654120, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.811180] env[62974]: DEBUG oslo_vmware.api [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654119, 'name': PowerOnVM_Task, 'duration_secs': 0.511233} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.815065] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.816026] env[62974]: INFO nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Took 10.66 seconds to spawn the instance on the hypervisor. [ 720.816026] env[62974]: DEBUG nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.816619] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5570c3d5-6081-46e4-b3cd-a623564e6b95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.857731] env[62974]: INFO nova.compute.manager [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Took 52.27 seconds to build instance. [ 721.084144] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6ae5cb-2b7e-4be4-8651-43133b500d4a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.093589] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83aca6bd-7b8c-4040-bdf7-b0ed09693841 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.132113] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0143903e-4ec8-4d91-bc29-2cc5aaea75b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.140077] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4ae086-b829-4a79-a0aa-8b000468e8ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.157370] env[62974]: DEBUG nova.compute.provider_tree [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.214757] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654118, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.240129] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654120, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588292} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.240129] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 0c2642d5-85fe-4db5-9891-025c88ca8c7c/0c2642d5-85fe-4db5-9891-025c88ca8c7c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.241479] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.241479] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a11f039-982d-4b13-b515-a80d81f09798 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.249859] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 721.249859] env[62974]: value = "task-2654121" [ 721.249859] env[62974]: _type = "Task" [ 721.249859] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.258679] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654121, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.328538] env[62974]: DEBUG nova.network.neutron [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Successfully updated port: 0576c111-5b07-4ceb-be4b-78e565bd0313 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.345020] env[62974]: INFO nova.compute.manager [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Took 56.15 seconds to build instance. 
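The oslo_concurrency.lockutils entries that recur throughout this log follow a fixed three-step pattern: 'Acquiring lock "<name>" by "<owner>"', then 'Lock "<name>" acquired by "<owner>" :: waited Ns', then 'Lock "<name>" "released" by "<owner>" :: held Ns'. As a minimal, self-contained sketch for pulling wait/hold times out of a log shaped like this one, using only the Python standard library (the function name and regular expressions below are illustrative choices of mine, not part of Nova or the oslo tooling):

```python
import re
from collections import defaultdict

# Regexes modelled on the lockutils messages visible in this log
# (illustrative assumptions, not taken from any OpenStack tool).
ACQUIRED = re.compile(
    r'Lock "(?P<name>[^"]+)" acquired by "(?P<owner>[^"]+)" :: waited (?P<waited>[\d.]+)s')
RELEASED = re.compile(
    r'Lock "(?P<name>[^"]+)" "released" by "(?P<owner>[^"]+)" :: held (?P<held>[\d.]+)s')


def summarize_lock_times(log_text: str) -> dict:
    """Return per-lock totals of time spent waiting for and holding the lock, in seconds."""
    totals = defaultdict(lambda: {"waited": 0.0, "held": 0.0})
    for m in ACQUIRED.finditer(log_text):
        totals[m.group("name")]["waited"] += float(m.group("waited"))
    for m in RELEASED.finditer(log_text):
        totals[m.group("name")]["held"] += float(m.group("held"))
    return dict(totals)


if __name__ == "__main__":
    # Two lockutils messages copied from this log excerpt.
    sample = (
        'Lock "compute_resources" acquired by '
        '"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.569s '
        'Lock "compute_resources" "released" by '
        '"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.435s'
    )
    print(summarize_lock_times(sample))
```

Run over this excerpt, such a sketch would, for example, attribute the 34.569s wait and the 2.435s hold seen around timestamp 719.99 to the "compute_resources" lock, which is one way to spot resource-tracker contention in a run like this.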
[ 721.360521] env[62974]: DEBUG oslo_concurrency.lockutils [None req-712fdced-8cd9-49cb-a0f3-d256dcc8a391 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.802s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.466964] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "5d6a072e-dba7-461d-9d41-8ca003b31102" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.467314] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.506887] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.507239] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.538050] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "1c7fabf7-ba82-4628-9016-b0f198add99a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.538631] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.572740] env[62974]: DEBUG nova.compute.manager [req-a1845646-89f3-4102-a41c-ba3b80add34c req-566da838-201f-4d8f-a7e6-454f88e5a5b4 service nova] [instance: 
3426d512-d54e-4852-8eca-8ba9f5fef418] Received event network-vif-plugged-0576c111-5b07-4ceb-be4b-78e565bd0313 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 721.572960] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1845646-89f3-4102-a41c-ba3b80add34c req-566da838-201f-4d8f-a7e6-454f88e5a5b4 service nova] Acquiring lock "3426d512-d54e-4852-8eca-8ba9f5fef418-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.573186] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1845646-89f3-4102-a41c-ba3b80add34c req-566da838-201f-4d8f-a7e6-454f88e5a5b4 service nova] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.573356] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1845646-89f3-4102-a41c-ba3b80add34c req-566da838-201f-4d8f-a7e6-454f88e5a5b4 service nova] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.573521] env[62974]: DEBUG nova.compute.manager [req-a1845646-89f3-4102-a41c-ba3b80add34c req-566da838-201f-4d8f-a7e6-454f88e5a5b4 service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] No waiting events found dispatching network-vif-plugged-0576c111-5b07-4ceb-be4b-78e565bd0313 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 721.573682] env[62974]: WARNING nova.compute.manager [req-a1845646-89f3-4102-a41c-ba3b80add34c req-566da838-201f-4d8f-a7e6-454f88e5a5b4 service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Received unexpected event network-vif-plugged-0576c111-5b07-4ceb-be4b-78e565bd0313 for instance with vm_state building and task_state spawning. [ 721.661142] env[62974]: DEBUG nova.scheduler.client.report [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.715741] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654118, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.744136] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.744362] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.758957] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654121, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.837419] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.837419] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.837419] env[62974]: DEBUG nova.network.neutron [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.847142] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0ed0c251-e37f-483f-98ef-5ed4886d742e tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.281s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.864514] env[62974]: DEBUG nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.167612] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.169615] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.739s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.169842] env[62974]: DEBUG nova.objects.instance [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lazy-loading 'resources' on Instance uuid ea2227ff-f694-4baa-af17-dc50338d8fa6 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 722.196912] env[62974]: INFO nova.scheduler.client.report [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted allocations for instance 2ebb3385-4177-4506-a4b0-52b53405cf49 [ 722.215835] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654118, 'name': PowerOffVM_Task, 'duration_secs': 2.315411} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.216203] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 722.216676] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 722.216676] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1aa29c4-d196-4147-96b1-bc6730f59bbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.267089] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.667171} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.267089] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.267492] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec925529-17ef-47d7-b60f-e1f55fedc568 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.294818] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 0c2642d5-85fe-4db5-9891-025c88ca8c7c/0c2642d5-85fe-4db5-9891-025c88ca8c7c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.296725] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba58245d-04e0-4e9c-a376-ee7744785e8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.314046] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 722.314046] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 722.314046] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleting the datastore file [datastore2] 669cd72c-556f-40b6-8bc2-f50a125c182a {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 722.314046] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0b81a8c-178f-4af5-bd97-71c37dd2d32e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.323052] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 722.323052] env[62974]: value = "task-2654123" [ 722.323052] env[62974]: _type = "Task" [ 722.323052] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.323052] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 722.323052] env[62974]: value = "task-2654124" [ 722.323052] env[62974]: _type = "Task" [ 722.323052] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.333990] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654123, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.339022] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.350324] env[62974]: DEBUG nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.392634] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.569411] env[62974]: DEBUG nova.network.neutron [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.575956] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.576229] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.709814] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3025ab62-3f5e-428a-a3e6-79ff98f043ee tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "2ebb3385-4177-4506-a4b0-52b53405cf49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.965s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.813154] env[62974]: DEBUG nova.network.neutron [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Updating instance_info_cache with network_info: [{"id": "0576c111-5b07-4ceb-be4b-78e565bd0313", "address": "fa:16:3e:3f:7d:6e", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0576c111-5b", "ovs_interfaceid": "0576c111-5b07-4ceb-be4b-78e565bd0313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.844609] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654123, 'name': ReconfigVM_Task, 'duration_secs': 0.47485} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.848183] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 0c2642d5-85fe-4db5-9891-025c88ca8c7c/0c2642d5-85fe-4db5-9891-025c88ca8c7c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.849369] env[62974]: DEBUG oslo_vmware.api [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268333} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.849451] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4cab8f8c-277a-4cf8-871c-264eeaf2001a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.851254] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 722.851439] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 722.851628] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 722.852832] env[62974]: INFO nova.compute.manager [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Took 3.19 seconds to destroy the instance on the hypervisor. [ 722.852832] env[62974]: DEBUG oslo.service.loopingcall [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 722.852832] env[62974]: DEBUG nova.compute.manager [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 722.852832] env[62974]: DEBUG nova.network.neutron [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.859480] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 722.859480] env[62974]: value = "task-2654125" [ 722.859480] env[62974]: _type = "Task" [ 722.859480] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.876637] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654125, 'name': Rename_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.880134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.227880] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "0bc05477-1802-4f8b-8d23-2742f9baf603" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.227880] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.318344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.318691] env[62974]: DEBUG nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Instance network_info: |[{"id": "0576c111-5b07-4ceb-be4b-78e565bd0313", "address": "fa:16:3e:3f:7d:6e", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", 
"bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0576c111-5b", "ovs_interfaceid": "0576c111-5b07-4ceb-be4b-78e565bd0313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 723.319208] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:7d:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '163e60bd-32d6-41c5-95e6-2eb10c5c9245', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0576c111-5b07-4ceb-be4b-78e565bd0313', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 723.326707] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating folder: Project (8152f704e86645a0a7e7e81d9edabf30). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.327065] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4de3e5af-966f-4698-9cdd-328e438a8b05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.349369] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created folder: Project (8152f704e86645a0a7e7e81d9edabf30) in parent group-v535199. [ 723.349369] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating folder: Instances. Parent ref: group-v535329. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.350207] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d553781-1ab1-4f67-8ae2-c086bb16090e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.352570] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7289ae97-8b7c-426f-8de3-fd36b258a4d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.362158] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c889b8-d266-4a93-9f77-7bf866a3207a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.367191] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created folder: Instances in parent group-v535329. [ 723.370020] env[62974]: DEBUG oslo.service.loopingcall [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.371787] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 723.372170] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01ebeffc-362a-40a0-9415-66b8f5754341 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.415563] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3832f7a8-57ed-45f4-bf23-ed75e4b3f5b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.420677] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654125, 'name': Rename_Task, 'duration_secs': 0.178775} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.422401] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.422641] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 723.422641] env[62974]: value = "task-2654128" [ 723.422641] env[62974]: _type = "Task" [ 723.422641] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.422823] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4ca1624-3842-40eb-a0a9-f8c3fa38578b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.430611] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59dbf0d-a62d-4e16-8c7a-e8e2ee5d8b62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.436042] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 723.436042] env[62974]: value = "task-2654129" [ 723.436042] env[62974]: _type = "Task" [ 723.436042] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.452301] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654128, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.452566] env[62974]: DEBUG nova.compute.provider_tree [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.459411] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654129, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.611169] env[62974]: DEBUG nova.compute.manager [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Received event network-changed-0576c111-5b07-4ceb-be4b-78e565bd0313 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 723.611449] env[62974]: DEBUG nova.compute.manager [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Refreshing instance network info cache due to event network-changed-0576c111-5b07-4ceb-be4b-78e565bd0313. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 723.611731] env[62974]: DEBUG oslo_concurrency.lockutils [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] Acquiring lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.611944] env[62974]: DEBUG oslo_concurrency.lockutils [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] Acquired lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.612247] env[62974]: DEBUG nova.network.neutron [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Refreshing network info cache for port 0576c111-5b07-4ceb-be4b-78e565bd0313 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.849467] env[62974]: DEBUG nova.network.neutron [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.936781] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654128, 'name': CreateVM_Task, 'duration_secs': 0.459268} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.936781] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 723.940825] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.941030] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.941455] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 723.941657] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4b07057-f6b2-47ca-8763-0b5bc26acaf4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.950545] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 
tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654129, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.952684] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 723.952684] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b201ce-b145-27c3-2acf-2f86d1184c18" [ 723.952684] env[62974]: _type = "Task" [ 723.952684] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.957468] env[62974]: DEBUG nova.scheduler.client.report [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.967355] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b201ce-b145-27c3-2acf-2f86d1184c18, 'name': SearchDatastore_Task, 'duration_secs': 0.011726} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.967704] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.968034] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 723.968293] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.968455] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.968727] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.969052] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-942e9285-bf36-4285-bf8d-d038bc421f73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.978206] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.978206] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 723.978584] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a9f57c5-3e5d-4922-87f5-6c59f3917cbc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.984240] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 723.984240] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5207c331-7d1e-ce81-73ed-33ff0c9f2202" [ 723.984240] env[62974]: _type = "Task" [ 723.984240] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.993585] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5207c331-7d1e-ce81-73ed-33ff0c9f2202, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.149097] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.149383] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.355516] env[62974]: INFO nova.compute.manager [-] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Took 1.50 seconds to deallocate network for instance. [ 724.417685] env[62974]: DEBUG nova.network.neutron [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Updated VIF entry in instance network info cache for port 0576c111-5b07-4ceb-be4b-78e565bd0313. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 724.417685] env[62974]: DEBUG nova.network.neutron [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Updating instance_info_cache with network_info: [{"id": "0576c111-5b07-4ceb-be4b-78e565bd0313", "address": "fa:16:3e:3f:7d:6e", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0576c111-5b", "ovs_interfaceid": "0576c111-5b07-4ceb-be4b-78e565bd0313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.452285] env[62974]: DEBUG oslo_vmware.api [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654129, 'name': PowerOnVM_Task, 'duration_secs': 0.634927} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.452815] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.453218] env[62974]: INFO nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Took 9.76 seconds to spawn the instance on the hypervisor. 
[ 724.453559] env[62974]: DEBUG nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.454498] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f11a456-60d3-40d8-b379-b9e573a8411b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.464505] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.295s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.466768] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.315s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.471272] env[62974]: INFO nova.compute.claims [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.500357] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5207c331-7d1e-ce81-73ed-33ff0c9f2202, 'name': SearchDatastore_Task, 'duration_secs': 0.012184} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.505520] env[62974]: INFO nova.scheduler.client.report [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Deleted allocations for instance ea2227ff-f694-4baa-af17-dc50338d8fa6 [ 724.510949] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97834b0c-159a-43a6-b315-3eeface16b05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.520248] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 724.520248] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cfec5e-7ce2-f33a-28dc-873203043ad0" [ 724.520248] env[62974]: _type = "Task" [ 724.520248] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.534178] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cfec5e-7ce2-f33a-28dc-873203043ad0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.863025] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.920045] env[62974]: DEBUG oslo_concurrency.lockutils [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] Releasing lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.921412] env[62974]: DEBUG nova.compute.manager [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Received event network-changed-8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 724.921412] env[62974]: DEBUG nova.compute.manager [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Refreshing instance network info cache due to event network-changed-8ad12b0d-168c-4485-b856-6649ee5fe3a4. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 724.921412] env[62974]: DEBUG oslo_concurrency.lockutils [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] Acquiring lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.921412] env[62974]: DEBUG oslo_concurrency.lockutils [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] Acquired lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.921412] env[62974]: DEBUG nova.network.neutron [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Refreshing network info cache for port 8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.978610] env[62974]: INFO nova.compute.manager [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Took 49.41 seconds to build instance. 
[ 725.019589] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eb55eeed-63b3-4e0d-92f9-60cda337088e tempest-MultipleCreateTestJSON-388179146 tempest-MultipleCreateTestJSON-388179146-project-member] Lock "ea2227ff-f694-4baa-af17-dc50338d8fa6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.375s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.031979] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cfec5e-7ce2-f33a-28dc-873203043ad0, 'name': SearchDatastore_Task, 'duration_secs': 0.015858} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.032089] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.032994] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/3426d512-d54e-4852-8eca-8ba9f5fef418.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.032994] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec77574c-60a7-41c3-8bf0-c7dc3a8266f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.041653] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 725.041653] env[62974]: value = "task-2654130" [ 725.041653] env[62974]: _type = "Task" [ 725.041653] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.051285] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654130, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.482643] env[62974]: DEBUG oslo_concurrency.lockutils [None req-58659924-58e6-470e-91db-ddad02186c01 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.665s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.572494] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654130, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.719665] env[62974]: DEBUG nova.network.neutron [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Updated VIF entry in instance network info cache for port 8ad12b0d-168c-4485-b856-6649ee5fe3a4. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.720106] env[62974]: DEBUG nova.network.neutron [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Updating instance_info_cache with network_info: [{"id": "8ad12b0d-168c-4485-b856-6649ee5fe3a4", "address": "fa:16:3e:ee:06:e5", "network": {"id": "c1b62dd4-5cd6-4af4-985f-91c4abc653a7", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1678520260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a368a28171f74897b6d3918fe7915b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ad12b0d-16", "ovs_interfaceid": "8ad12b0d-168c-4485-b856-6649ee5fe3a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.991818] env[62974]: DEBUG nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 726.061819] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.745603} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.062311] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/3426d512-d54e-4852-8eca-8ba9f5fef418.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.062524] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.062779] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ad9f91a-55e5-4ceb-9f13-5c44a5657edc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.070108] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 726.070108] env[62974]: value = "task-2654131" [ 726.070108] env[62974]: _type = "Task" [ 726.070108] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.079306] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654131, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.139571] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b50acf-0644-4124-8bf6-26572f2f5c23 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.147425] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204325c3-3e00-4b70-8bc8-570ff36f4606 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.181037] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8557cdb5-1917-44d4-81eb-23957f59eb00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.189248] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c116699-cfe7-45e2-b698-939eb546c436 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.203766] env[62974]: DEBUG nova.compute.provider_tree [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.223481] env[62974]: DEBUG oslo_concurrency.lockutils [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] Releasing lock "refresh_cache-a14e7e40-afef-4607-8fa9-935a92ea49dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.223784] env[62974]: DEBUG nova.compute.manager [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Received event network-vif-deleted-b71f7882-2184-4093-856d-a4bbc389dd03 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 726.223991] env[62974]: INFO nova.compute.manager [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Neutron deleted interface b71f7882-2184-4093-856d-a4bbc389dd03; detaching it from the instance and deleting it from the info cache [ 726.224191] env[62974]: DEBUG nova.network.neutron [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.509287] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.581044] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 
tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654131, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.250187} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.581494] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.583134] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a59bfa-d436-4fab-9827-b00a5bcb1cde {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.612196] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/3426d512-d54e-4852-8eca-8ba9f5fef418.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.612519] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff7d3954-aaf1-48f8-a28e-e93424aa0e60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.634749] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 726.634749] env[62974]: value = "task-2654132" [ 726.634749] env[62974]: _type = "Task" [ 726.634749] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.649936] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654132, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.707202] env[62974]: DEBUG nova.scheduler.client.report [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.729344] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a26562c-a91c-489b-981f-338ec29c8bdf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.739077] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b60147a-5a81-47ae-b502-6fb1a01c9180 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.774032] env[62974]: DEBUG nova.compute.manager [req-e2355e8b-1eac-43f3-85fa-465286c1b8f9 req-157cf489-6762-4281-a3d0-4d8b2fb96360 service nova] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Detach interface failed, port_id=b71f7882-2184-4093-856d-a4bbc389dd03, reason: Instance 669cd72c-556f-40b6-8bc2-f50a125c182a could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 727.145642] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654132, 'name': ReconfigVM_Task, 'duration_secs': 0.341674} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.145921] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/3426d512-d54e-4852-8eca-8ba9f5fef418.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.146727] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e73e879d-f1a8-4faf-9707-6cd8d8915b11 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.155171] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 727.155171] env[62974]: value = "task-2654133" [ 727.155171] env[62974]: _type = "Task" [ 727.155171] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.166576] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654133, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.213717] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.213717] env[62974]: DEBUG nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 727.217566] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.998s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.217566] env[62974]: DEBUG nova.objects.instance [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lazy-loading 'resources' on Instance uuid f9adcd7e-58a0-433c-8602-cca814b84aaa {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.666032] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654133, 'name': Rename_Task, 'duration_secs': 0.191857} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.667248] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.667248] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc124cb3-afe2-42db-9c2d-6fcc5212c3ea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.674652] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 727.674652] env[62974]: value = "task-2654134" [ 727.674652] env[62974]: _type = "Task" [ 727.674652] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.689871] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654134, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.719254] env[62974]: DEBUG nova.compute.utils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 727.724207] env[62974]: DEBUG nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 727.724836] env[62974]: DEBUG nova.network.neutron [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 727.773224] env[62974]: DEBUG nova.policy [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcb0068668124811ab0cd555f828c7df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8152f704e86645a0a7e7e81d9edabf30', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 727.818966] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b33276-6679-48f8-25b7-4b0b1e805d92/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 727.820125] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ab332c-723a-46dd-8b90-b90aaeae12ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.830456] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b33276-6679-48f8-25b7-4b0b1e805d92/disk-0.vmdk is in state: ready. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 727.830753] env[62974]: ERROR oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b33276-6679-48f8-25b7-4b0b1e805d92/disk-0.vmdk due to incomplete transfer. [ 727.831147] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ec9cdf1a-cb64-47fa-8acc-cda991679869 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.840445] env[62974]: DEBUG oslo_vmware.rw_handles [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b33276-6679-48f8-25b7-4b0b1e805d92/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 727.840445] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Uploaded image a6f12fcd-569e-4086-80cf-7a34fcbb8706 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 727.843097] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 727.845885] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2c82e142-0923-4ab4-8cb3-b6ffd58512df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.854914] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 727.854914] env[62974]: value = "task-2654135" [ 727.854914] env[62974]: _type = "Task" [ 727.854914] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.860871] env[62974]: DEBUG nova.compute.manager [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 727.865656] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d354cf-6d87-421f-91a4-79c49831c4f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.869404] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654135, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.131669] env[62974]: DEBUG nova.network.neutron [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Successfully created port: 3b0107e6-4f52-40dc-90c3-d21197cbdf34 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.189949] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654134, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.227237] env[62974]: DEBUG nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 728.369062] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654135, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.377741] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78520f28-1ce4-4899-8776-8d9f90a4b55a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.381538] env[62974]: INFO nova.compute.manager [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] instance snapshotting [ 728.385412] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025832cb-83eb-4981-a210-636b9810dff0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.391364] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e8b22d-6f82-48c8-b876-cdbdad7b10b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.409698] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeba17f7-a307-48ab-b356-0c379407207a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.438566] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2ced87-b518-4d3e-b723-f9533586653e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.451281] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8837574-6765-4d7b-b6d0-1e867f0f152d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.468051] env[62974]: DEBUG 
nova.compute.provider_tree [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.686392] env[62974]: DEBUG oslo_vmware.api [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654134, 'name': PowerOnVM_Task, 'duration_secs': 0.825276} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.686732] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.687032] env[62974]: INFO nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Took 9.11 seconds to spawn the instance on the hypervisor. [ 728.687032] env[62974]: DEBUG nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.687796] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848b0372-8535-4eb9-88f5-38d3894322a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.873205] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654135, 'name': Destroy_Task, 'duration_secs': 0.753041} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.873493] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Destroyed the VM [ 728.873735] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 728.873993] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-018da82e-5c4d-4c36-aa61-72b00e9e8222 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.881057] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 728.881057] env[62974]: value = "task-2654136" [ 728.881057] env[62974]: _type = "Task" [ 728.881057] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.893900] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654136, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.947962] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 728.948313] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c11854b9-e0cd-42ef-9f20-771a34d6b404 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.971989] env[62974]: DEBUG nova.scheduler.client.report [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 729.028169] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 729.028169] env[62974]: value = "task-2654137" [ 729.028169] env[62974]: _type = "Task" [ 729.028169] env[62974]: } to 
complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.045973] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654137, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.209282] env[62974]: INFO nova.compute.manager [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Took 46.88 seconds to build instance. [ 729.215419] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "6e8f07c2-60da-4bad-a7af-8c83294e232f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.215486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "6e8f07c2-60da-4bad-a7af-8c83294e232f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.243037] env[62974]: DEBUG nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 729.268680] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 729.269114] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.269183] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 729.269338] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 729.269483] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 729.269627] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 729.269828] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 729.270065] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 729.270158] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 729.270328] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 729.270486] env[62974]: DEBUG nova.virt.hardware [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 729.271455] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf54352-d415-47c3-91e7-188bde239274 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.279680] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b4558a-504f-4759-92e3-9cebcbf54be7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.391045] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654136, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.533334] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.314s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.533334] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.354s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.535092] env[62974]: INFO nova.compute.claims [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.546896] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654137, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.561581] env[62974]: INFO nova.scheduler.client.report [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Deleted allocations for instance f9adcd7e-58a0-433c-8602-cca814b84aaa [ 729.683134] env[62974]: DEBUG nova.network.neutron [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Successfully updated port: 3b0107e6-4f52-40dc-90c3-d21197cbdf34 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.711609] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c1c4079-2d9b-448e-bcbe-18bc856ad0b5 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.349s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.772982] env[62974]: DEBUG nova.compute.manager [req-3238b5f7-2251-49e4-b30e-18421cc4272c req-6a10d9f1-3f6b-4006-a7c3-55c4dd92d617 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Received event network-vif-plugged-3b0107e6-4f52-40dc-90c3-d21197cbdf34 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 729.773304] env[62974]: DEBUG oslo_concurrency.lockutils [req-3238b5f7-2251-49e4-b30e-18421cc4272c req-6a10d9f1-3f6b-4006-a7c3-55c4dd92d617 service nova] Acquiring lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.773601] env[62974]: DEBUG oslo_concurrency.lockutils [req-3238b5f7-2251-49e4-b30e-18421cc4272c req-6a10d9f1-3f6b-4006-a7c3-55c4dd92d617 service nova] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.773862] env[62974]: DEBUG oslo_concurrency.lockutils [req-3238b5f7-2251-49e4-b30e-18421cc4272c req-6a10d9f1-3f6b-4006-a7c3-55c4dd92d617 service nova] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.775082] env[62974]: DEBUG nova.compute.manager [req-3238b5f7-2251-49e4-b30e-18421cc4272c req-6a10d9f1-3f6b-4006-a7c3-55c4dd92d617 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] No waiting events found dispatching network-vif-plugged-3b0107e6-4f52-40dc-90c3-d21197cbdf34 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 729.775340] env[62974]: WARNING nova.compute.manager [req-3238b5f7-2251-49e4-b30e-18421cc4272c req-6a10d9f1-3f6b-4006-a7c3-55c4dd92d617 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Received unexpected event network-vif-plugged-3b0107e6-4f52-40dc-90c3-d21197cbdf34 for instance with vm_state building and 
task_state spawning. [ 729.891027] env[62974]: DEBUG oslo_vmware.api [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654136, 'name': RemoveSnapshot_Task, 'duration_secs': 1.007401} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.891318] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 729.891674] env[62974]: INFO nova.compute.manager [None req-b8aaa77e-cd30-4c32-b634-c1912bc6d4a0 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Took 17.63 seconds to snapshot the instance on the hypervisor. [ 730.044366] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654137, 'name': CreateSnapshot_Task, 'duration_secs': 1.060357} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.044830] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 730.045590] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5f5983-c3a1-45f0-b816-654181fd36d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.073291] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b7a7fb3-1310-409d-8f59-2ee4bc0f8bbb tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "f9adcd7e-58a0-433c-8602-cca814b84aaa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.492s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.185426] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.185597] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.185750] env[62974]: DEBUG nova.network.neutron [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 
tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.214666] env[62974]: DEBUG nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 730.563247] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 730.563575] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c990faf7-f15b-4e8b-878d-2a894ba0bb88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.572573] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 730.572573] env[62974]: value = "task-2654138" [ 730.572573] env[62974]: _type = "Task" [ 730.572573] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.583609] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654138, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.609434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "d941a678-1b67-4e0f-8806-e6682ef21774" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.610806] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "d941a678-1b67-4e0f-8806-e6682ef21774" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.611064] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "d941a678-1b67-4e0f-8806-e6682ef21774-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.611266] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "d941a678-1b67-4e0f-8806-e6682ef21774-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.611501] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "d941a678-1b67-4e0f-8806-e6682ef21774-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.617949] env[62974]: INFO nova.compute.manager [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Terminating instance [ 730.727260] env[62974]: DEBUG nova.network.neutron [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.749106] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.867886] env[62974]: DEBUG nova.network.neutron [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Updating instance_info_cache with network_info: [{"id": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "address": "fa:16:3e:26:69:10", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b0107e6-4f", "ovs_interfaceid": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.084922] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654138, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.119896] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a00acb7-aa6d-45a2-8c28-d51753f1b555 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.122852] env[62974]: DEBUG nova.compute.manager [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 731.123152] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.124570] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5fc299-669c-478a-8095-36227d6f1bc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.135140] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7012008-42cc-4a23-a314-727dc728760a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.138309] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.139046] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e28411fc-0524-4f46-8b99-e400472e47bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.175246] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2ddb88-335f-43c8-8a6f-ef8e0e0bbf34 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.182918] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16da28e-efed-4b5c-8abd-908c45dbe930 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.203121] env[62974]: DEBUG nova.compute.provider_tree [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.213717] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.214062] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.214172] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleting the datastore file [datastore1] d941a678-1b67-4e0f-8806-e6682ef21774 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
731.215049] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91f2b3f9-b71d-4517-84ea-7523a9e9e578 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.222685] env[62974]: DEBUG oslo_vmware.api [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 731.222685] env[62974]: value = "task-2654140" [ 731.222685] env[62974]: _type = "Task" [ 731.222685] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.233094] env[62974]: DEBUG oslo_vmware.api [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654140, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.236425] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.236652] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.370119] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.370509] env[62974]: DEBUG nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Instance network_info: |[{"id": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "address": "fa:16:3e:26:69:10", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 
716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b0107e6-4f", "ovs_interfaceid": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 731.370959] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:69:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '163e60bd-32d6-41c5-95e6-2eb10c5c9245', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b0107e6-4f52-40dc-90c3-d21197cbdf34', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 731.378766] env[62974]: DEBUG oslo.service.loopingcall [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.379022] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 731.379282] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bba4c445-8575-4cef-bd67-f9df18d1c72c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.399939] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 731.399939] env[62974]: value = "task-2654141" [ 731.399939] env[62974]: _type = "Task" [ 731.399939] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.410710] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654141, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.584610] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654138, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.708020] env[62974]: DEBUG nova.scheduler.client.report [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.735330] env[62974]: DEBUG oslo_vmware.api [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184857} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.735667] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.735786] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.736126] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.736425] env[62974]: INFO nova.compute.manager [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Took 0.61 seconds to destroy the instance on the hypervisor. [ 731.736722] env[62974]: DEBUG oslo.service.loopingcall [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.736974] env[62974]: DEBUG nova.compute.manager [-] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 731.737198] env[62974]: DEBUG nova.network.neutron [-] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.823347] env[62974]: DEBUG nova.compute.manager [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Received event network-changed-3b0107e6-4f52-40dc-90c3-d21197cbdf34 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 731.823441] env[62974]: DEBUG nova.compute.manager [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Refreshing instance network info cache due to event network-changed-3b0107e6-4f52-40dc-90c3-d21197cbdf34. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 731.823749] env[62974]: DEBUG oslo_concurrency.lockutils [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] Acquiring lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.823847] env[62974]: DEBUG oslo_concurrency.lockutils [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] Acquired lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.824199] env[62974]: DEBUG nova.network.neutron [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Refreshing network info cache for port 3b0107e6-4f52-40dc-90c3-d21197cbdf34 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 731.912880] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654141, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.084718] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654138, 'name': CloneVM_Task, 'duration_secs': 1.404406} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.085477] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Created linked-clone VM from snapshot [ 732.086398] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99480e0-6f4e-4665-986b-3e1a567a816a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.094218] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Uploading image 3a433e01-7c04-4818-8d24-b093284570d1 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 732.125217] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 732.125217] env[62974]: value = "vm-535333" [ 732.125217] env[62974]: _type = "VirtualMachine" [ 732.125217] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 732.125514] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6c62738c-3de2-4f8b-9f06-df8fa4273d12 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.132784] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lease: (returnval){ [ 732.132784] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ebada-d922-a92c-e682-293961a04fca" [ 732.132784] env[62974]: _type = "HttpNfcLease" [ 732.132784] env[62974]: } obtained for exporting VM: (result){ [ 732.132784] env[62974]: value = "vm-535333" [ 732.132784] env[62974]: _type = "VirtualMachine" [ 732.132784] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 732.133037] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the lease: (returnval){ [ 732.133037] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ebada-d922-a92c-e682-293961a04fca" [ 732.133037] env[62974]: _type = "HttpNfcLease" [ 732.133037] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 732.139823] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 732.139823] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ebada-d922-a92c-e682-293961a04fca" [ 732.139823] env[62974]: _type = "HttpNfcLease" [ 732.139823] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 732.213406] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.213977] env[62974]: DEBUG nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 732.218869] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.138s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.222396] env[62974]: INFO nova.compute.claims [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.414967] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654141, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.497484] env[62974]: DEBUG nova.network.neutron [-] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.631565] env[62974]: DEBUG nova.network.neutron [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Updated VIF entry in instance network info cache for port 3b0107e6-4f52-40dc-90c3-d21197cbdf34. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 732.631905] env[62974]: DEBUG nova.network.neutron [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Updating instance_info_cache with network_info: [{"id": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "address": "fa:16:3e:26:69:10", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b0107e6-4f", "ovs_interfaceid": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.640737] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 732.640737] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ebada-d922-a92c-e682-293961a04fca" [ 732.640737] env[62974]: _type = "HttpNfcLease" [ 732.640737] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 732.641039] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 732.641039] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520ebada-d922-a92c-e682-293961a04fca" [ 732.641039] env[62974]: _type = "HttpNfcLease" [ 732.641039] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 732.641740] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b3b76f-d8b1-4281-86cd-0e9598a70081 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.649146] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e57104-d702-04ac-f2bf-74fb7a5a3edf/disk-0.vmdk from lease info. 
{{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 732.649355] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e57104-d702-04ac-f2bf-74fb7a5a3edf/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 732.726834] env[62974]: DEBUG nova.compute.utils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 732.729840] env[62974]: DEBUG nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 732.730032] env[62974]: DEBUG nova.network.neutron [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.761714] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-48a544b5-d3d2-49a4-9466-eed9497c22be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.911678] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654141, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.929781] env[62974]: DEBUG nova.policy [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e0f9b6a0f9f4f4e992381105c69dfbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57e631c2e78a4391bceb20072992f8bd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 733.005801] env[62974]: INFO nova.compute.manager [-] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Took 1.27 seconds to deallocate network for instance. 
[ 733.135165] env[62974]: DEBUG oslo_concurrency.lockutils [req-80537e46-b498-4e76-aee5-ea5c905d0f5b req-b4266ca2-13e6-4a91-a2e9-faf4f3eb6666 service nova] Releasing lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.232798] env[62974]: DEBUG nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 733.281890] env[62974]: DEBUG nova.network.neutron [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Successfully created port: 6154e0ec-ced2-4237-b78c-1703baf5b7dd {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.413730] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654141, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.513849] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.875655] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45079119-2dd6-4430-9a94-c355aa848b7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.889018] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9700e2be-4691-4494-99a2-50197e6cd4a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.936459] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ccfdb8-55ec-4e7f-bc79-03ac27a4f377 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.946142] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654141, 'name': CreateVM_Task, 'duration_secs': 2.177872} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.949045] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 733.950054] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.950405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.950967] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 733.953044] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa1535a-72cb-4454-b4c8-cfee4b3e76f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.961097] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-775c5f02-46dd-4d3c-a91f-d12095ab2e95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.963683] env[62974]: DEBUG nova.compute.manager [req-a7340605-62d7-43d7-8cd4-260cb87e6044 req-8e13d7d0-bd44-44d0-8fb7-3f2f4cfd9654 service nova] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Received event network-vif-deleted-3c57614f-5d9e-48de-b1c1-03931a43e20e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 733.975206] env[62974]: DEBUG nova.compute.provider_tree [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.978135] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 733.978135] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219996d-9358-76b3-d6e1-47a27ba1d471" [ 733.978135] env[62974]: _type = "Task" [ 733.978135] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.987808] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219996d-9358-76b3-d6e1-47a27ba1d471, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.250303] env[62974]: DEBUG nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 734.280150] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 734.280571] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.280823] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 734.281153] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.281313] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 734.281467] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 734.281691] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 734.281869] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 734.282020] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 734.282202] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 734.282442] env[62974]: DEBUG nova.virt.hardware [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 734.283410] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4c422e-fdde-48ad-ab3a-a573cde1af73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.291626] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561e7f1a-3775-4691-9461-39812a068e7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.479976] env[62974]: DEBUG nova.scheduler.client.report [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.496885] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219996d-9358-76b3-d6e1-47a27ba1d471, 'name': SearchDatastore_Task, 'duration_secs': 0.019193} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.496885] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.497666] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.497969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.498621] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.498621] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.499820] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d4ec8e9-89d1-4599-bb98-8efc2ce9d1c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.510406] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.510406] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 734.510824] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc999a4f-d669-44ed-bc8b-efa754a333d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.516637] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 734.516637] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52de4804-ac67-7998-ba76-5f17d37c2abb" [ 734.516637] env[62974]: _type = "Task" [ 734.516637] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.525383] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52de4804-ac67-7998-ba76-5f17d37c2abb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.850137] env[62974]: DEBUG nova.network.neutron [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Successfully updated port: 6154e0ec-ced2-4237-b78c-1703baf5b7dd {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 734.988617] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.989258] env[62974]: DEBUG nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 734.992144] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.855s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.992380] env[62974]: DEBUG nova.objects.instance [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lazy-loading 'resources' on Instance uuid 1933bc47-1717-48c1-b4a2-492a17573de7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.030814] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52de4804-ac67-7998-ba76-5f17d37c2abb, 'name': SearchDatastore_Task, 'duration_secs': 0.021342} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.031105] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50392956-4502-401c-843f-ffb837449588 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.037761] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 735.037761] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ddd668-5fec-69c5-5bda-e37288cf2535" [ 735.037761] env[62974]: _type = "Task" [ 735.037761] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.047224] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ddd668-5fec-69c5-5bda-e37288cf2535, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.354369] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "refresh_cache-bcacc508-b910-4144-bf0b-454b0928ca71" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.354586] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "refresh_cache-bcacc508-b910-4144-bf0b-454b0928ca71" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.354805] env[62974]: DEBUG nova.network.neutron [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.499593] env[62974]: DEBUG nova.compute.utils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.501225] env[62974]: DEBUG nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 735.501409] env[62974]: DEBUG nova.network.neutron [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.551923] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ddd668-5fec-69c5-5bda-e37288cf2535, 'name': SearchDatastore_Task, 'duration_secs': 0.015645} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.552277] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.552606] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/c002aec9-4fdf-45c9-9ef6-d196c4891e19.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.553987] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08cd02cc-b0ec-461f-b457-fc63fd9e13a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.562608] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 735.562608] env[62974]: value = "task-2654143" [ 735.562608] env[62974]: _type = "Task" [ 735.562608] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.571199] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.587347] env[62974]: DEBUG nova.policy [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f1bb5be437e42e38b80df35193d784a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28bc1945aba64a2ea67745b0d417b9ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 735.916021] env[62974]: DEBUG nova.network.neutron [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.004897] env[62974]: DEBUG nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.062022] env[62974]: DEBUG nova.compute.manager [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Received event network-vif-plugged-6154e0ec-ced2-4237-b78c-1703baf5b7dd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 736.062022] env[62974]: DEBUG oslo_concurrency.lockutils [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] Acquiring lock "bcacc508-b910-4144-bf0b-454b0928ca71-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.062022] env[62974]: DEBUG oslo_concurrency.lockutils [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] Lock "bcacc508-b910-4144-bf0b-454b0928ca71-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.062219] env[62974]: DEBUG oslo_concurrency.lockutils [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] Lock "bcacc508-b910-4144-bf0b-454b0928ca71-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.062340] env[62974]: DEBUG nova.compute.manager [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] No waiting events found dispatching network-vif-plugged-6154e0ec-ced2-4237-b78c-1703baf5b7dd {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 736.062488] env[62974]: WARNING nova.compute.manager [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Received unexpected event network-vif-plugged-6154e0ec-ced2-4237-b78c-1703baf5b7dd for instance with vm_state building and task_state spawning. [ 736.062636] env[62974]: DEBUG nova.compute.manager [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Received event network-changed-6154e0ec-ced2-4237-b78c-1703baf5b7dd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 736.062789] env[62974]: DEBUG nova.compute.manager [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Refreshing instance network info cache due to event network-changed-6154e0ec-ced2-4237-b78c-1703baf5b7dd. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 736.062899] env[62974]: DEBUG oslo_concurrency.lockutils [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] Acquiring lock "refresh_cache-bcacc508-b910-4144-bf0b-454b0928ca71" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.077876] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654143, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.184437] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0cdcc4-d7c9-4cd6-a97c-071f11b2a3eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.202187] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f2f260-2602-4c09-8613-d3641758b13e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.247311] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a318d321-59f2-4976-a9b8-f440dead6bc1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.256073] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b60151-d591-4837-85db-d963e47e4e75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.270263] env[62974]: DEBUG nova.compute.provider_tree [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.307688] env[62974]: DEBUG nova.network.neutron [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Updating instance_info_cache with network_info: [{"id": "6154e0ec-ced2-4237-b78c-1703baf5b7dd", "address": "fa:16:3e:a8:04:71", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6154e0ec-ce", 
"ovs_interfaceid": "6154e0ec-ced2-4237-b78c-1703baf5b7dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.332323] env[62974]: DEBUG nova.network.neutron [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Successfully created port: bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.581842] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715064} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.582144] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/c002aec9-4fdf-45c9-9ef6-d196c4891e19.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 736.582365] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.582606] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c12e3ccb-dd8d-4623-ad36-902af2acfae0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.589508] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 736.589508] env[62974]: value = "task-2654144" [ 736.589508] env[62974]: _type = "Task" [ 736.589508] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.599521] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654144, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.773747] env[62974]: DEBUG nova.scheduler.client.report [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.810383] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "refresh_cache-bcacc508-b910-4144-bf0b-454b0928ca71" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.810729] env[62974]: DEBUG nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Instance network_info: |[{"id": "6154e0ec-ced2-4237-b78c-1703baf5b7dd", "address": "fa:16:3e:a8:04:71", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6154e0ec-ce", "ovs_interfaceid": "6154e0ec-ced2-4237-b78c-1703baf5b7dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 736.811034] env[62974]: DEBUG oslo_concurrency.lockutils [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] Acquired lock "refresh_cache-bcacc508-b910-4144-bf0b-454b0928ca71" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.811216] env[62974]: DEBUG nova.network.neutron [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Refreshing network info cache for port 6154e0ec-ced2-4237-b78c-1703baf5b7dd {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 736.814026] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:04:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f54f7284-8f7d-47ee-839d-2143062cfe44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6154e0ec-ced2-4237-b78c-1703baf5b7dd', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.821271] env[62974]: DEBUG oslo.service.loopingcall [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 736.821794] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 736.822068] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0651373-dfdf-4150-af46-4ea94876ba4c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.844950] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.844950] env[62974]: value = "task-2654145" [ 736.844950] env[62974]: _type = "Task" [ 736.844950] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.854455] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654145, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.021287] env[62974]: DEBUG nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.047294] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.047875] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.048251] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.048396] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.048618] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.048833] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.049386] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.049646] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 737.049881] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.050117] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.050349] env[62974]: DEBUG nova.virt.hardware [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.051518] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f585b08-d6ce-48cb-a438-3a3d9003a206 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.064053] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ab11c4-a0e5-4066-b9fb-4a587ec89814 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.104271] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091189} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.104578] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.105642] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ffd9c1-e62e-4ef2-9630-a6bd4e08f2c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.130603] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/c002aec9-4fdf-45c9-9ef6-d196c4891e19.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.131016] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9017eac5-a85c-49c2-bf11-db2d89e61c63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.151431] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 737.151431] env[62974]: value = "task-2654146" [ 737.151431] env[62974]: _type = "Task" [ 737.151431] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.164443] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654146, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.287019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.293s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.288367] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.971s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.291021] env[62974]: INFO nova.compute.claims [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.324104] env[62974]: INFO nova.scheduler.client.report [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Deleted allocations for instance 1933bc47-1717-48c1-b4a2-492a17573de7 [ 737.356266] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654145, 'name': CreateVM_Task, 'duration_secs': 0.418066} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.360021] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.360021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.360021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.360021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.360021] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d173c9b3-bcef-4aa2-98d0-cee41f221887 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
737.364894] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 737.364894] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525c8dc7-625d-2cd4-bee8-007e70d1b5c5" [ 737.364894] env[62974]: _type = "Task" [ 737.364894] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.376911] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525c8dc7-625d-2cd4-bee8-007e70d1b5c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.551996] env[62974]: DEBUG nova.network.neutron [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Updated VIF entry in instance network info cache for port 6154e0ec-ced2-4237-b78c-1703baf5b7dd. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 737.552397] env[62974]: DEBUG nova.network.neutron [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Updating instance_info_cache with network_info: [{"id": "6154e0ec-ced2-4237-b78c-1703baf5b7dd", "address": "fa:16:3e:a8:04:71", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6154e0ec-ce", "ovs_interfaceid": "6154e0ec-ced2-4237-b78c-1703baf5b7dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.664475] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654146, 'name': ReconfigVM_Task, 'duration_secs': 0.425143} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.664475] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Reconfigured VM instance instance-0000002f to attach disk [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/c002aec9-4fdf-45c9-9ef6-d196c4891e19.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.664475] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44450b47-69c8-4461-be35-d6bc0ce75f60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.670648] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 737.670648] env[62974]: value = "task-2654147" [ 737.670648] env[62974]: _type = "Task" [ 737.670648] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.681600] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654147, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.837556] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08026b5f-9491-4fbf-a4d4-64fca87c3aa1 tempest-VolumesAssistedSnapshotsTest-2050976469 tempest-VolumesAssistedSnapshotsTest-2050976469-project-member] Lock "1933bc47-1717-48c1-b4a2-492a17573de7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.056s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.875419] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525c8dc7-625d-2cd4-bee8-007e70d1b5c5, 'name': SearchDatastore_Task, 'duration_secs': 0.012253} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.875743] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.875978] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.877493] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.877493] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.877493] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.877493] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6daa3c30-83c3-42af-85ac-054cef3b6ce6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.885214] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.885404] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.886200] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc9f61c7-d98f-4b51-b80e-5ce0264e9fef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.892088] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 737.892088] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e6903e-4467-b5a7-83b7-9646d46c8275" [ 737.892088] env[62974]: _type = "Task" [ 737.892088] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.908138] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e6903e-4467-b5a7-83b7-9646d46c8275, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.034083] env[62974]: DEBUG nova.network.neutron [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Successfully updated port: bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.055454] env[62974]: DEBUG oslo_concurrency.lockutils [req-a4396cb9-176b-481b-ac76-aa791ecf84c8 req-cb8c7c7f-a873-485c-9b9b-2eca7794e08a service nova] Releasing lock "refresh_cache-bcacc508-b910-4144-bf0b-454b0928ca71" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.171173] env[62974]: DEBUG nova.compute.manager [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Received event network-vif-plugged-bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 738.171335] env[62974]: DEBUG oslo_concurrency.lockutils [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] Acquiring lock "69fb00b3-6a41-4ef5-8876-6548cae31c07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.171546] env[62974]: DEBUG oslo_concurrency.lockutils [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.171715] env[62974]: DEBUG oslo_concurrency.lockutils [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.171884] env[62974]: DEBUG nova.compute.manager [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] No waiting events found dispatching network-vif-plugged-bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 738.172198] env[62974]: WARNING nova.compute.manager [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Received unexpected event network-vif-plugged-bb305260-1683-4681-aea1-92b24514bf2b for instance with vm_state building and task_state spawning. [ 738.172320] env[62974]: DEBUG nova.compute.manager [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Received event network-changed-bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 738.172528] env[62974]: DEBUG nova.compute.manager [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Refreshing instance network info cache due to event network-changed-bb305260-1683-4681-aea1-92b24514bf2b. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 738.172528] env[62974]: DEBUG oslo_concurrency.lockutils [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] Acquiring lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.172735] env[62974]: DEBUG oslo_concurrency.lockutils [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] Acquired lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.172805] env[62974]: DEBUG nova.network.neutron [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Refreshing network info cache for port bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.191734] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654147, 'name': Rename_Task, 'duration_secs': 0.24103} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.191952] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.192428] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb967f6a-de46-464b-9dec-9420f77122c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.199874] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 738.199874] env[62974]: value = "task-2654148" [ 738.199874] env[62974]: _type = "Task" [ 738.199874] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.210800] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654148, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.403650] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e6903e-4467-b5a7-83b7-9646d46c8275, 'name': SearchDatastore_Task, 'duration_secs': 0.013508} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.407618] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35c5d5d1-b298-4fa9-9d65-e046c2963c32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.417020] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 738.417020] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c740fc-cc00-c531-d40a-7674c64326cf" [ 738.417020] env[62974]: _type = "Task" [ 738.417020] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.424803] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c740fc-cc00-c531-d40a-7674c64326cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.538161] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.715162] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654148, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.740644] env[62974]: DEBUG nova.network.neutron [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.911175] env[62974]: DEBUG nova.network.neutron [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.931505] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c740fc-cc00-c531-d40a-7674c64326cf, 'name': SearchDatastore_Task, 'duration_secs': 0.016034} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.934083] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.934829] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] bcacc508-b910-4144-bf0b-454b0928ca71/bcacc508-b910-4144-bf0b-454b0928ca71.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.935199] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1becdb70-6a78-43b9-bcab-e773635b350f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.945965] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 738.945965] env[62974]: value = "task-2654149" [ 738.945965] env[62974]: _type = "Task" [ 738.945965] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.955358] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654149, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.957436] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af0d475-fa90-449c-aa47-0940ab9869a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.965897] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f2b884-9356-412d-88d4-ea09c50ee66b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.006057] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8984eb90-295f-4f60-a0ec-1f740d657c75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.015242] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd01753-41d7-4c19-bcf8-e375920bc219 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.039693] env[62974]: DEBUG nova.compute.provider_tree [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.214852] env[62974]: DEBUG oslo_vmware.api [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654148, 'name': PowerOnVM_Task, 'duration_secs': 0.540656} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.215349] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.215628] env[62974]: INFO nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Took 9.97 seconds to spawn the instance on the hypervisor. 
[ 739.215974] env[62974]: DEBUG nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.217327] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6e9195-fc41-449f-8f3d-15a0d281c9a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.416987] env[62974]: DEBUG oslo_concurrency.lockutils [req-7149072f-8cc6-462c-b8a0-ab94d17bad81 req-b05fa220-767a-4798-9eee-1928630c1bd3 service nova] Releasing lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.416987] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.416987] env[62974]: DEBUG nova.network.neutron [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.463474] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654149, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.544451] env[62974]: DEBUG nova.scheduler.client.report [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 739.742712] env[62974]: INFO nova.compute.manager [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Took 51.61 seconds to build instance. [ 739.964039] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654149, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.725998} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.964845] env[62974]: DEBUG nova.network.neutron [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.967729] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] bcacc508-b910-4144-bf0b-454b0928ca71/bcacc508-b910-4144-bf0b-454b0928ca71.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.967729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.967729] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30d34338-0f6f-4641-a6a6-8607a22850eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.973877] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 739.973877] env[62974]: value = "task-2654150" [ 739.973877] env[62974]: _type = "Task" [ 739.973877] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.981802] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654150, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.051107] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.051793] env[62974]: DEBUG nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 740.056538] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.662s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.058263] env[62974]: INFO nova.compute.claims [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.136736] env[62974]: DEBUG nova.network.neutron [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Updating instance_info_cache with network_info: [{"id": "bb305260-1683-4681-aea1-92b24514bf2b", "address": "fa:16:3e:53:14:12", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb305260-16", "ovs_interfaceid": "bb305260-1683-4681-aea1-92b24514bf2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.247513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c124cbdd-6c8b-4b3c-a5a6-dc8e6597be84 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.234s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.271319] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "55229db9-9442-4973-a1f2-7762227167a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.271580] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 
tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.489028] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654150, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146202} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.489529] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.493288] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf9ba68-20b0-4582-bc7b-f59c00950ce7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.520145] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] bcacc508-b910-4144-bf0b-454b0928ca71/bcacc508-b910-4144-bf0b-454b0928ca71.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.520953] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efd5006b-b805-4e52-8f2a-7ef63d2ebc80 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.549044] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 740.549044] env[62974]: value = "task-2654151" [ 740.549044] env[62974]: _type = "Task" [ 740.549044] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.558240] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654151, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.563283] env[62974]: DEBUG nova.compute.utils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 740.569245] env[62974]: DEBUG nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Not allocating networking since 'none' was specified. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 740.577924] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e57104-d702-04ac-f2bf-74fb7a5a3edf/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 740.579697] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bf1bee-446e-49c2-bdd2-a5d299e68b33 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.591473] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e57104-d702-04ac-f2bf-74fb7a5a3edf/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 740.591473] env[62974]: ERROR oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e57104-d702-04ac-f2bf-74fb7a5a3edf/disk-0.vmdk due to incomplete transfer. [ 740.591777] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d2f98518-f47c-446e-873f-fe0d94844e3e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.600491] env[62974]: DEBUG oslo_vmware.rw_handles [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e57104-d702-04ac-f2bf-74fb7a5a3edf/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 740.600758] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Uploaded image 3a433e01-7c04-4818-8d24-b093284570d1 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 740.603538] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 740.604210] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-05e28093-1970-4177-b09f-1b91b1a59bb6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.612972] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 740.612972] env[62974]: value = "task-2654152" [ 740.612972] env[62974]: _type = "Task" [ 740.612972] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.628422] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654152, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.639361] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.639807] env[62974]: DEBUG nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Instance network_info: |[{"id": "bb305260-1683-4681-aea1-92b24514bf2b", "address": "fa:16:3e:53:14:12", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb305260-16", "ovs_interfaceid": "bb305260-1683-4681-aea1-92b24514bf2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 740.640852] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:14:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb305260-1683-4681-aea1-92b24514bf2b', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.652201] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating folder: Project (28bc1945aba64a2ea67745b0d417b9ef). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.652509] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fe9b585-a9a2-4c1f-ad0f-f4674c920078 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.662513] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Created folder: Project (28bc1945aba64a2ea67745b0d417b9ef) in parent group-v535199. [ 740.662708] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating folder: Instances. Parent ref: group-v535336. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.662941] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8eb8d0af-cee9-4f33-ba1e-4ed50d5d8dcb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.670770] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Created folder: Instances in parent group-v535336. [ 740.671010] env[62974]: DEBUG oslo.service.loopingcall [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.671225] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.671430] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7db8393c-1366-494f-8a3e-565bace63323 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.701359] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.701359] env[62974]: value = "task-2654155" [ 740.701359] env[62974]: _type = "Task" [ 740.701359] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.706503] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654155, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.748682] env[62974]: DEBUG nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 740.988300] env[62974]: INFO nova.compute.manager [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Rescuing [ 740.988300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.988300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.988300] env[62974]: DEBUG nova.network.neutron [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.062353] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654151, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.067818] env[62974]: DEBUG nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 741.135928] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654152, 'name': Destroy_Task, 'duration_secs': 0.384412} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.135928] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Destroyed the VM [ 741.135928] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 741.135928] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-41317c16-77ca-494b-b1e6-154663308872 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.143497] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 741.143497] env[62974]: value = "task-2654156" [ 741.143497] env[62974]: _type = "Task" [ 741.143497] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.154896] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654156, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.208907] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654155, 'name': CreateVM_Task, 'duration_secs': 0.4842} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.210734] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.212342] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.214661] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.215030] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.215294] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4d54992-3ae2-4a1e-84e5-e87d1263bbea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.220557] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 741.220557] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d17d96-8fc7-dd85-8c79-8564d57ad01c" [ 741.220557] env[62974]: _type = "Task" [ 741.220557] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.229493] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d17d96-8fc7-dd85-8c79-8564d57ad01c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.273652] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.562096] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654151, 'name': ReconfigVM_Task, 'duration_secs': 0.628595} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.565206] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Reconfigured VM instance instance-00000030 to attach disk [datastore2] bcacc508-b910-4144-bf0b-454b0928ca71/bcacc508-b910-4144-bf0b-454b0928ca71.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.566157] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c13542c7-de2c-49b3-95c6-3323bb142975 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.585074] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 741.585074] env[62974]: value = "task-2654157" [ 741.585074] env[62974]: _type = "Task" [ 741.585074] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.597071] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654157, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.657701] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654156, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.675990] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0dc8a7-de9f-4865-add0-7151f78f9b52 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.682769] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49de03af-840e-4f08-b03a-469e74b2ce09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.718275] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c185236-250a-420a-af5e-8a5f2d0cff6d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.727890] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ed7498-5448-402c-b4ba-c7af1a66ba5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.735712] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d17d96-8fc7-dd85-8c79-8564d57ad01c, 'name': SearchDatastore_Task, 'duration_secs': 0.016554} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.739508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.739508] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.739508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.739508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.739778] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.739778] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85c90e92-d578-454f-aa31-5eed2324dc0c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.746863] env[62974]: DEBUG nova.compute.provider_tree [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.754639] env[62974]: DEBUG nova.network.neutron [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Updating instance_info_cache with network_info: [{"id": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "address": "fa:16:3e:26:69:10", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b0107e6-4f", "ovs_interfaceid": "3b0107e6-4f52-40dc-90c3-d21197cbdf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.757747] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.757921] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.758725] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-423c31e1-651d-4b2a-ae8e-3415c6320907 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.765152] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 741.765152] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52869abc-27f5-cb96-b614-dc8515473520" [ 741.765152] env[62974]: _type = "Task" [ 741.765152] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.774426] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52869abc-27f5-cb96-b614-dc8515473520, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.081850] env[62974]: DEBUG nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 742.095092] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654157, 'name': Rename_Task, 'duration_secs': 0.298758} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.095201] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.095415] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-142dc092-514f-4699-868e-30dbf1fb35a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.101989] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 742.101989] env[62974]: value = "task-2654158" [ 742.101989] env[62974]: _type = "Task" [ 742.101989] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.109754] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654158, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.123984] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 742.124510] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.124687] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 742.124869] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.125020] 
env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 742.125281] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 742.125519] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 742.125680] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 742.125844] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 742.126099] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 742.126267] env[62974]: DEBUG nova.virt.hardware [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 742.127246] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6e3931-762b-4047-b4e7-b3fefcefad2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.135015] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b67a4e0-3099-44e3-a814-ccad20bd72c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.150039] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.155639] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Creating folder: Project 
(3e956f61d5624b1fa6a2a23bc2668c21). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.158826] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76056bee-077e-4ec4-9f65-f2e2bd9b435f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.165348] env[62974]: DEBUG oslo_vmware.api [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654156, 'name': RemoveSnapshot_Task, 'duration_secs': 0.762789} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.165912] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 742.165912] env[62974]: INFO nova.compute.manager [None req-15421851-6e93-4abd-99c7-029deab6c7b2 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Took 13.78 seconds to snapshot the instance on the hypervisor. [ 742.169181] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Created folder: Project (3e956f61d5624b1fa6a2a23bc2668c21) in parent group-v535199. [ 742.169360] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Creating folder: Instances. Parent ref: group-v535339. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.169798] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bed87a14-2a26-44f2-834d-1c791bd5a63d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.178525] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Created folder: Instances in parent group-v535339. [ 742.178763] env[62974]: DEBUG oslo.service.loopingcall [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.179017] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.179212] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e0b9f4d-0625-4a7e-9675-e8a7380c87b3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.194793] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.194793] env[62974]: value = "task-2654161" [ 742.194793] env[62974]: _type = "Task" [ 742.194793] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.202611] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654161, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.250471] env[62974]: DEBUG nova.scheduler.client.report [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 742.257088] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-c002aec9-4fdf-45c9-9ef6-d196c4891e19" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.277028] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52869abc-27f5-cb96-b614-dc8515473520, 'name': SearchDatastore_Task, 'duration_secs': 0.013179} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.277903] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dad516cf-0fa5-4405-9399-2d2bb8707225 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.286017] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 742.286017] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219702e-8242-3a2c-5754-26a257582498" [ 742.286017] env[62974]: _type = "Task" [ 742.286017] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.297869] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219702e-8242-3a2c-5754-26a257582498, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.611631] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654158, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.706722] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654161, 'name': CreateVM_Task, 'duration_secs': 0.303035} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.706904] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 742.707376] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.707521] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.708188] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 742.708188] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6025cbb-9f41-4643-844f-8cba9b4ba1c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.712737] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 742.712737] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529c94d5-3826-a23f-70d7-09a9b8d34738" [ 742.712737] env[62974]: _type = "Task" [ 742.712737] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.721482] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529c94d5-3826-a23f-70d7-09a9b8d34738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.756260] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.756786] env[62974]: DEBUG nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 742.759473] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 30.598s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.804658] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219702e-8242-3a2c-5754-26a257582498, 'name': SearchDatastore_Task, 'duration_secs': 0.012814} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.804966] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.805269] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 69fb00b3-6a41-4ef5-8876-6548cae31c07/69fb00b3-6a41-4ef5-8876-6548cae31c07.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 742.805533] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49cdfdc1-0123-4ed9-95f0-4854b2e02137 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.813699] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 742.813699] env[62974]: value = "task-2654162" [ 742.813699] env[62974]: _type = "Task" [ 742.813699] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.823180] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.113325] env[62974]: DEBUG oslo_vmware.api [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654158, 'name': PowerOnVM_Task, 'duration_secs': 0.846518} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.113602] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.113748] env[62974]: INFO nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Took 8.86 seconds to spawn the instance on the hypervisor. 
[ 743.113938] env[62974]: DEBUG nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.114760] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a0b7fc-6ed9-4ff7-96b6-50bbd85a5c77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.226402] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529c94d5-3826-a23f-70d7-09a9b8d34738, 'name': SearchDatastore_Task, 'duration_secs': 0.039994} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.230031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.230031] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.230031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.230031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.230458] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.230458] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8b5762c-6484-42b6-836a-ad127dddb6e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.246048] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 
tempest-ServersAaction247Test-224573481-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.246048] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.246048] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb8aa65d-484e-41ac-a80c-3f5de2024bef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.251852] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 743.251852] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f4f53-737b-f49f-8c13-85eb111f0586" [ 743.251852] env[62974]: _type = "Task" [ 743.251852] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.265577] env[62974]: INFO nova.compute.claims [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.279257] env[62974]: DEBUG nova.compute.utils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.281325] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f4f53-737b-f49f-8c13-85eb111f0586, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.282041] env[62974]: DEBUG nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 743.282564] env[62974]: DEBUG nova.network.neutron [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 743.329689] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654162, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.349849] env[62974]: DEBUG nova.policy [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49d8e3a243d346e8969ba6f325e7787e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9087d01b1ad748e0a66474953dfe7034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 743.638966] env[62974]: INFO nova.compute.manager [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Took 42.50 seconds to build instance. [ 743.763843] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f4f53-737b-f49f-8c13-85eb111f0586, 'name': SearchDatastore_Task, 'duration_secs': 0.05004} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.764755] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-086e8056-1cc8-4c2e-a259-d34209ca8124 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.771949] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 743.771949] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bc68f8-73bb-2252-fa4c-90071978b9ed" [ 743.771949] env[62974]: _type = "Task" [ 743.771949] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.780500] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bc68f8-73bb-2252-fa4c-90071978b9ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.784230] env[62974]: INFO nova.compute.resource_tracker [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating resource usage from migration 51863248-1bda-40f6-8d3a-2b1dc321bf21 [ 743.788270] env[62974]: DEBUG nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.807870] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 743.808287] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b4b4dbe-5470-4b98-8ea8-948f984c3208 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.815976] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 743.815976] env[62974]: value = "task-2654163" [ 743.815976] env[62974]: _type = "Task" [ 743.815976] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.833763] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540675} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.834620] env[62974]: DEBUG nova.network.neutron [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Successfully created port: 58ed2814-e050-4f6f-9847-7912e525e286 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.839409] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 69fb00b3-6a41-4ef5-8876-6548cae31c07/69fb00b3-6a41-4ef5-8876-6548cae31c07.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 743.839649] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.840275] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654163, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.840531] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43b3db1c-5415-41ce-a63f-2fb99bde47dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.851770] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 743.851770] env[62974]: value = "task-2654164" [ 743.851770] env[62974]: _type = "Task" [ 743.851770] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.861098] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654164, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.142524] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d04bc4a-5710-4287-93a3-4546845c3e48 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "bcacc508-b910-4144-bf0b-454b0928ca71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.217s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.282391] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bc68f8-73bb-2252-fa4c-90071978b9ed, 'name': SearchDatastore_Task, 'duration_secs': 0.037899} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.282665] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.282935] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 4967d5be-6cd4-4f23-aca4-d9ae11112369/4967d5be-6cd4-4f23-aca4-d9ae11112369.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.284536] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48c70ab9-6d49-4265-a067-74f13960b550 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.290521] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 744.290521] env[62974]: value = "task-2654165" [ 744.290521] env[62974]: _type = "Task" [ 744.290521] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.302218] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.331747] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654163, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.362534] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654164, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07164} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.362797] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 744.363575] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4d85f1-31c2-4b75-9233-f97724c8a488 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.390654] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 69fb00b3-6a41-4ef5-8876-6548cae31c07/69fb00b3-6a41-4ef5-8876-6548cae31c07.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 744.393351] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a23e241-58fd-4456-bbe8-e6ffff17c155 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.414649] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 744.414649] env[62974]: value = "task-2654166" [ 744.414649] env[62974]: _type = "Task" [ 744.414649] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.425183] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654166, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.475008] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce30a570-726d-461a-a8c8-46f55907ee38 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.485427] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68163886-4dae-4bc1-8036-ecc0de8e2437 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.522419] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3c18f8-95d0-4c0b-9102-66a151e08e90 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.531223] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ab0480-d4dc-4c2c-940c-ffa6794eb001 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.546564] env[62974]: DEBUG nova.compute.provider_tree [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.648518] env[62974]: DEBUG nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.718072] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "bcacc508-b910-4144-bf0b-454b0928ca71" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.718072] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "bcacc508-b910-4144-bf0b-454b0928ca71" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.718228] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "bcacc508-b910-4144-bf0b-454b0928ca71-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.718434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "bcacc508-b910-4144-bf0b-454b0928ca71-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.718614] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "bcacc508-b910-4144-bf0b-454b0928ca71-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.723021] env[62974]: INFO nova.compute.manager [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Terminating instance [ 744.801880] env[62974]: DEBUG nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 744.804179] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654165, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.829631] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.833020] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.833020] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.833020] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.833020] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.833020] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.833404] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.833404] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.833404] env[62974]: 
DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.833404] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.833404] env[62974]: DEBUG nova.virt.hardware [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.833665] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb9109a-7f11-4df9-b01f-144b6bfba264 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.840243] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654163, 'name': PowerOffVM_Task, 'duration_secs': 0.518404} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.840989] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 744.841876] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5418445-72cc-4d05-ae4c-1cd9bcfbf819 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.848156] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146cb662-94c3-42c4-b493-655ef3ab2489 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.867307] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffd829d-be29-4250-8331-764f4beb9952 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.911532] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 744.911887] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0f52a0a-b5db-42e5-be96-dd787724bb87 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.920019] env[62974]: DEBUG 
oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 744.920019] env[62974]: value = "task-2654167" [ 744.920019] env[62974]: _type = "Task" [ 744.920019] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.927369] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654166, 'name': ReconfigVM_Task, 'duration_secs': 0.3104} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.928154] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 69fb00b3-6a41-4ef5-8876-6548cae31c07/69fb00b3-6a41-4ef5-8876-6548cae31c07.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 744.928845] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a6e7c06-e754-4205-8391-40ce3559c78c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.934961] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 744.935467] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.935467] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.935562] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.935763] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 
tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.936038] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-672b108b-fa1e-4a12-9ca8-800cc2996c39 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.939746] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 744.939746] env[62974]: value = "task-2654168" [ 744.939746] env[62974]: _type = "Task" [ 744.939746] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.948300] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654168, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.950158] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.950389] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 744.951151] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1f2ab6d-b95c-4277-9b4a-a202e63312a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.956845] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 744.956845] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521fcdf0-614e-6ed5-b15e-0f28934bc72a" [ 744.956845] env[62974]: _type = "Task" [ 744.956845] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.965754] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521fcdf0-614e-6ed5-b15e-0f28934bc72a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.986301] env[62974]: DEBUG nova.compute.manager [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.987245] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69253450-9fb5-4024-a493-4c13867cca07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.050279] env[62974]: DEBUG nova.scheduler.client.report [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.174910] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.229295] env[62974]: DEBUG nova.compute.manager [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 745.229295] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 745.229295] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec678e5a-04a1-4ff2-9fe0-c3af8f50fa32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.237087] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.237087] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3eb9a4ab-dfe2-4e7a-afe6-51a502c338b3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.244250] env[62974]: DEBUG oslo_vmware.api [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 745.244250] env[62974]: value = "task-2654169" [ 745.244250] env[62974]: _type = "Task" [ 745.244250] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.250606] env[62974]: DEBUG oslo_vmware.api [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654169, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.302360] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654165, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.894597} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.302712] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 4967d5be-6cd4-4f23-aca4-d9ae11112369/4967d5be-6cd4-4f23-aca4-d9ae11112369.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 745.303241] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.303241] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-492baefd-dc79-4e79-ad92-34a503df3939 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.309558] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 745.309558] env[62974]: value = "task-2654170" [ 745.309558] env[62974]: _type = "Task" [ 745.309558] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.317728] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.451686] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654168, 'name': Rename_Task, 'duration_secs': 0.276127} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.451686] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 745.451686] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5890081-09bc-4f12-a979-03f0fe0f09c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.457459] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 745.457459] env[62974]: value = "task-2654171" [ 745.457459] env[62974]: _type = "Task" [ 745.457459] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.469433] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521fcdf0-614e-6ed5-b15e-0f28934bc72a, 'name': SearchDatastore_Task, 'duration_secs': 0.059404} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.472635] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654171, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.472876] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bba71390-b2cf-421b-994d-c73b89421441 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.478120] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 745.478120] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526e83c8-89e2-2159-f83b-3e01fde562df" [ 745.478120] env[62974]: _type = "Task" [ 745.478120] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.480217] env[62974]: DEBUG nova.compute.manager [req-b3327fd0-b9d8-4e74-afae-fe18902c1397 req-1e7581c8-ee3a-4d62-b444-290b190f070f service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Received event network-vif-plugged-58ed2814-e050-4f6f-9847-7912e525e286 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 745.480456] env[62974]: DEBUG oslo_concurrency.lockutils [req-b3327fd0-b9d8-4e74-afae-fe18902c1397 req-1e7581c8-ee3a-4d62-b444-290b190f070f service nova] Acquiring lock "65615fd7-c219-4c19-8ecf-11336b616ead-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.480677] env[62974]: DEBUG oslo_concurrency.lockutils [req-b3327fd0-b9d8-4e74-afae-fe18902c1397 req-1e7581c8-ee3a-4d62-b444-290b190f070f service nova] Lock "65615fd7-c219-4c19-8ecf-11336b616ead-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.480859] env[62974]: DEBUG oslo_concurrency.lockutils [req-b3327fd0-b9d8-4e74-afae-fe18902c1397 req-1e7581c8-ee3a-4d62-b444-290b190f070f service nova] Lock "65615fd7-c219-4c19-8ecf-11336b616ead-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.481271] env[62974]: DEBUG nova.compute.manager [req-b3327fd0-b9d8-4e74-afae-fe18902c1397 req-1e7581c8-ee3a-4d62-b444-290b190f070f service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] No 
waiting events found dispatching network-vif-plugged-58ed2814-e050-4f6f-9847-7912e525e286 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 745.481535] env[62974]: WARNING nova.compute.manager [req-b3327fd0-b9d8-4e74-afae-fe18902c1397 req-1e7581c8-ee3a-4d62-b444-290b190f070f service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Received unexpected event network-vif-plugged-58ed2814-e050-4f6f-9847-7912e525e286 for instance with vm_state building and task_state spawning. [ 745.491100] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526e83c8-89e2-2159-f83b-3e01fde562df, 'name': SearchDatastore_Task, 'duration_secs': 0.009266} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.491100] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.491357] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. {{(pid=62974) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 745.491678] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a9f29bf-e9e6-41f8-975e-2548e5bb2253 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.498328] env[62974]: INFO nova.compute.manager [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] instance snapshotting [ 745.499990] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 745.499990] env[62974]: value = "task-2654172" [ 745.499990] env[62974]: _type = "Task" [ 745.499990] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.500974] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2575ca3-eada-43e2-93b4-4ee144c6b5e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.514175] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654172, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.529425] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c145bb-1062-4360-b094-b3bca67fb516 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.555546] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.796s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.555667] env[62974]: INFO nova.compute.manager [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Migrating [ 745.564922] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.701s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.564922] env[62974]: DEBUG nova.objects.instance [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lazy-loading 'resources' on Instance uuid 5bc466fb-eebb-40b1-ba09-614a25782ecd {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 745.642077] env[62974]: DEBUG nova.network.neutron [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Successfully updated port: 58ed2814-e050-4f6f-9847-7912e525e286 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.752783] env[62974]: DEBUG oslo_vmware.api [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654169, 'name': PowerOffVM_Task, 'duration_secs': 0.192192} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.753173] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 745.753439] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 745.753763] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1025b210-a904-4638-a385-bc003b711c14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.819635] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059453} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.820256] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.821174] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b057930-c85c-46fc-9f72-6c98850fc44e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.843378] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 4967d5be-6cd4-4f23-aca4-d9ae11112369/4967d5be-6cd4-4f23-aca4-d9ae11112369.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.843844] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49706285-3aa7-482c-b870-80834d3f0690 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.864834] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 745.864834] env[62974]: value = "task-2654174" [ 745.864834] env[62974]: _type = "Task" [ 745.864834] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.873275] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654174, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.971720] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654171, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.009993] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654172, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472447} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.010373] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. [ 746.011553] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677e2cf4-29c7-4e0e-8909-3fbdee9939f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.038276] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.038583] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-367b143b-b6c5-41dd-8aa5-71b78913096d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.052294] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 746.052596] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-baa8d1e6-88f4-4940-ba6e-882756a6084e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.060823] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 
tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 746.060823] env[62974]: value = "task-2654176" [ 746.060823] env[62974]: _type = "Task" [ 746.060823] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.062396] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 746.062396] env[62974]: value = "task-2654175" [ 746.062396] env[62974]: _type = "Task" [ 746.062396] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.075104] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654176, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.082897] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.082897] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.082897] env[62974]: DEBUG nova.network.neutron [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.083345] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654175, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.107679] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 746.107895] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 746.108180] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleting the datastore file [datastore2] bcacc508-b910-4144-bf0b-454b0928ca71 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.108705] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfbf7fa9-5e03-4367-88bb-40ec20a06a27 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.118078] env[62974]: DEBUG oslo_vmware.api [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 746.118078] env[62974]: value = "task-2654177" [ 746.118078] env[62974]: _type = "Task" [ 746.118078] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.127173] env[62974]: DEBUG oslo_vmware.api [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654177, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.145440] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-65615fd7-c219-4c19-8ecf-11336b616ead" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.145587] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-65615fd7-c219-4c19-8ecf-11336b616ead" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.145732] env[62974]: DEBUG nova.network.neutron [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.379975] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654174, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.451199] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.451850] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.478674] env[62974]: DEBUG oslo_vmware.api [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654171, 'name': PowerOnVM_Task, 'duration_secs': 0.746389} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.479147] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 746.479463] env[62974]: INFO nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Took 9.46 seconds to spawn the instance on the hypervisor. 
[ 746.479766] env[62974]: DEBUG nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.480997] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39ad1da-11b4-49e8-979d-56f6a2a7f77c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.580348] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654176, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.586402] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654175, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.628771] env[62974]: DEBUG oslo_vmware.api [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.464748} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.629508] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.629717] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 746.629896] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 746.630079] env[62974]: INFO nova.compute.manager [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Took 1.40 seconds to destroy the instance on the hypervisor. [ 746.630335] env[62974]: DEBUG oslo.service.loopingcall [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.632904] env[62974]: DEBUG nova.compute.manager [-] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 746.633032] env[62974]: DEBUG nova.network.neutron [-] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.712306] env[62974]: DEBUG nova.network.neutron [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.753955] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cde8c95-63c7-43e4-a837-e1a13fbe9c54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.766446] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabc27cf-364f-435e-8639-bebcfc55c66f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.829830] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3daafb9d-1af3-4763-bc85-58328cf07eb4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.841418] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1987316e-8839-4bb6-a332-1c039eddcb05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.863448] env[62974]: DEBUG nova.compute.provider_tree [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.878905] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654174, 'name': ReconfigVM_Task, 'duration_secs': 0.53476} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.879254] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 4967d5be-6cd4-4f23-aca4-d9ae11112369/4967d5be-6cd4-4f23-aca4-d9ae11112369.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.880142] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89604572-83d6-417d-9696-bd101ab29f9b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.889381] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 746.889381] env[62974]: value = "task-2654178" [ 746.889381] env[62974]: _type = "Task" [ 746.889381] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.902082] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654178, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.963405] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.963405] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 747.009366] env[62974]: INFO nova.compute.manager [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Took 42.96 seconds to build instance. [ 747.078630] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654176, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.081939] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654175, 'name': ReconfigVM_Task, 'duration_secs': 0.941266} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.082219] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Reconfigured VM instance instance-0000002f to attach disk [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.083075] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5622afbb-e72f-4bad-a8d8-ec21db390071 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.112970] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2eb59890-16fd-4f4a-b3b5-38a0937432c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.123121] env[62974]: DEBUG nova.network.neutron [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance_info_cache with network_info: [{"id": "3b60d221-2cab-4e30-8892-d139b511ccc1", "address": "fa:16:3e:80:cf:bd", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b60d221-2c", "ovs_interfaceid": "3b60d221-2cab-4e30-8892-d139b511ccc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.124934] env[62974]: DEBUG nova.network.neutron [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Updating instance_info_cache with network_info: [{"id": "58ed2814-e050-4f6f-9847-7912e525e286", "address": "fa:16:3e:61:a2:1e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58ed2814-e0", "ovs_interfaceid": "58ed2814-e050-4f6f-9847-7912e525e286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.132493] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 747.132493] env[62974]: value = "task-2654179" [ 747.132493] env[62974]: _type = "Task" [ 747.132493] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.142441] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654179, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.370034] env[62974]: DEBUG nova.scheduler.client.report [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.402769] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654178, 'name': Rename_Task, 'duration_secs': 0.264326} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.402769] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 747.402769] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61669276-2fbc-4bb0-a29f-6851648586a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.410036] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 747.410036] env[62974]: value = "task-2654180" [ 747.410036] env[62974]: _type = "Task" [ 747.410036] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.417048] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654180, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.512705] env[62974]: DEBUG oslo_concurrency.lockutils [None req-188bb122-eefe-494a-b964-22600c3dfbb1 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.469s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.575832] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654176, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.594605] env[62974]: DEBUG nova.compute.manager [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Received event network-changed-58ed2814-e050-4f6f-9847-7912e525e286 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 747.594810] env[62974]: DEBUG nova.compute.manager [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Refreshing instance network info cache due to event network-changed-58ed2814-e050-4f6f-9847-7912e525e286. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 747.595167] env[62974]: DEBUG oslo_concurrency.lockutils [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] Acquiring lock "refresh_cache-65615fd7-c219-4c19-8ecf-11336b616ead" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.627741] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.631421] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-65615fd7-c219-4c19-8ecf-11336b616ead" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.631762] env[62974]: DEBUG nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Instance network_info: |[{"id": "58ed2814-e050-4f6f-9847-7912e525e286", "address": "fa:16:3e:61:a2:1e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58ed2814-e0", "ovs_interfaceid": "58ed2814-e050-4f6f-9847-7912e525e286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.632135] env[62974]: DEBUG oslo_concurrency.lockutils [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] Acquired lock "refresh_cache-65615fd7-c219-4c19-8ecf-11336b616ead" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.632369] env[62974]: DEBUG nova.network.neutron [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Refreshing network info cache for port 58ed2814-e050-4f6f-9847-7912e525e286 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.633781] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 
tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:a2:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58ed2814-e050-4f6f-9847-7912e525e286', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.645158] env[62974]: DEBUG oslo.service.loopingcall [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.646576] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.649920] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-beba0c02-a54a-42a7-887c-1e5ae6c318c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.672489] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654179, 'name': ReconfigVM_Task, 'duration_secs': 0.505561} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.674155] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 747.674325] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.674325] env[62974]: value = "task-2654181" [ 747.674325] env[62974]: _type = "Task" [ 747.674325] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.674509] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c29d986-76b1-4cbf-890a-d848722c89db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.687273] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654181, 'name': CreateVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.688632] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 747.688632] env[62974]: value = "task-2654182" [ 747.688632] env[62974]: _type = "Task" [ 747.688632] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.698815] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.726821] env[62974]: DEBUG nova.network.neutron [-] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.875237] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.309s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.876541] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.346s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.877359] env[62974]: DEBUG nova.objects.instance [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 747.911700] env[62974]: INFO nova.scheduler.client.report [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Deleted allocations for instance 5bc466fb-eebb-40b1-ba09-614a25782ecd [ 747.933154] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654180, 'name': PowerOnVM_Task} progress is 96%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.014511] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 748.080028] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654176, 'name': CreateSnapshot_Task, 'duration_secs': 1.640492} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.080028] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 748.080028] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3f07b1-3273-485c-8ea2-2e8f3290937f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.186591] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654181, 'name': CreateVM_Task, 'duration_secs': 0.37011} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.189495] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.190481] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.190776] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.191221] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 748.201126] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f99515d-1b25-4330-81e4-700fa6d7de06 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.209177] env[62974]: DEBUG oslo_vmware.api [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654182, 'name': PowerOnVM_Task, 'duration_secs': 0.455535} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.210891] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 748.212997] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 748.212997] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520cb33f-f2df-37d0-5b42-145146374e35" [ 748.212997] env[62974]: _type = "Task" [ 748.212997] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.219432] env[62974]: DEBUG nova.compute.manager [None req-c2ce38fa-5c10-49c3-a148-bfffc58a5e9b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 748.220695] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf186a2d-7ea9-44c4-9764-e5ef90cd384d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.231320] env[62974]: INFO nova.compute.manager [-] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Took 1.60 seconds to deallocate network for instance. [ 748.231841] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520cb33f-f2df-37d0-5b42-145146374e35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.419652] env[62974]: DEBUG nova.network.neutron [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Updated VIF entry in instance network info cache for port 58ed2814-e050-4f6f-9847-7912e525e286. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 748.421044] env[62974]: DEBUG nova.network.neutron [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Updating instance_info_cache with network_info: [{"id": "58ed2814-e050-4f6f-9847-7912e525e286", "address": "fa:16:3e:61:a2:1e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58ed2814-e0", "ovs_interfaceid": "58ed2814-e050-4f6f-9847-7912e525e286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.426947] env[62974]: DEBUG oslo_vmware.api [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654180, 'name': PowerOnVM_Task, 'duration_secs': 0.651371} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.427888] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 748.427993] env[62974]: INFO nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Took 6.35 seconds to spawn the instance on the hypervisor. 
[ 748.428479] env[62974]: DEBUG nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 748.428694] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7419da82-a7ca-4a16-8f0a-94dfa16abb5a tempest-ServerShowV247Test-319425169 tempest-ServerShowV247Test-319425169-project-member] Lock "5bc466fb-eebb-40b1-ba09-614a25782ecd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.908s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.430056] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31d619a-c3dd-4b58-9144-c91f9c4e1918 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.538337] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.604657] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 748.605574] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-54c7dc9c-5957-49de-8f4a-68f1dd2ae903 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.616195] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 748.616195] env[62974]: value = "task-2654183" [ 748.616195] env[62974]: _type = "Task" [ 748.616195] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.625501] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654183, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.725969] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520cb33f-f2df-37d0-5b42-145146374e35, 'name': SearchDatastore_Task, 'duration_secs': 0.025144} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.726325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.726574] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.726969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.726969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.727750] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.727750] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80f1ceed-0208-41ea-911f-c0eb1061de0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.735610] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.735819] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.736642] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91f045e7-7aab-4c16-baa1-f769342a7228 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.743763] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.746586] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 748.746586] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524efa5c-d79a-8400-5074-e7ae713653ec" [ 748.746586] env[62974]: _type = "Task" [ 748.746586] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.757760] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524efa5c-d79a-8400-5074-e7ae713653ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010399} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.759531] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57a4cbda-0de6-46e7-a35d-3fa1ce2d704a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.766229] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 748.766229] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fd3d58-71ae-cb98-7103-605c4274a25c" [ 748.766229] env[62974]: _type = "Task" [ 748.766229] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.779018] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fd3d58-71ae-cb98-7103-605c4274a25c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.889443] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a606f24-280c-4dfa-8176-bd2c7ef5df4a tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.889443] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.793s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.889443] env[62974]: INFO nova.compute.claims [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.931875] env[62974]: DEBUG oslo_concurrency.lockutils [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] Releasing lock "refresh_cache-65615fd7-c219-4c19-8ecf-11336b616ead" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.931875] env[62974]: DEBUG nova.compute.manager [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Received event network-vif-deleted-6154e0ec-ced2-4237-b78c-1703baf5b7dd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.931875] env[62974]: INFO nova.compute.manager [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Neutron deleted interface 6154e0ec-ced2-4237-b78c-1703baf5b7dd; detaching it from the instance and deleting it from the info cache [ 748.931991] env[62974]: DEBUG nova.network.neutron [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.951518] env[62974]: INFO nova.compute.manager [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Took 39.66 seconds to build instance. [ 749.136252] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654183, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.143088] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43109a92-1a66-4a04-81b3-b8a9712e84b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.163036] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance '8621428e-cf42-47a4-82c8-a003c377b257' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 749.278682] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fd3d58-71ae-cb98-7103-605c4274a25c, 'name': SearchDatastore_Task, 'duration_secs': 0.020989} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.279076] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.279431] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.279633] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e5cb06b-1942-45f1-a2dd-28447233a841 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.287475] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 749.287475] env[62974]: value = "task-2654184" [ 749.287475] env[62974]: _type = "Task" [ 749.287475] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.298133] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654184, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.436653] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-304a027a-acdd-4b31-adb6-808c84c59ae6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.448624] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d04977-cfc4-4eb9-adf7-cbe5617e793f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.460624] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aad2300b-cd2c-4d98-ad6c-7766a3066a86 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "4967d5be-6cd4-4f23-aca4-d9ae11112369" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.103s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.493014] env[62974]: DEBUG nova.compute.manager [req-da115f5c-154f-4cb1-834e-07869e2afe6b req-bc4e51c4-bf2f-4c44-add4-8fc248b495dd service nova] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Detach interface failed, port_id=6154e0ec-ced2-4237-b78c-1703baf5b7dd, reason: Instance bcacc508-b910-4144-bf0b-454b0928ca71 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 749.520570] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.520688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.520829] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 749.633036] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654183, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.670174] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.670919] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca521269-3513-47f2-9613-13e936eeefb9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.679021] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 749.679021] env[62974]: value = "task-2654185" [ 749.679021] env[62974]: _type = "Task" [ 749.679021] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.687890] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.797599] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654184, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.860152] env[62974]: DEBUG nova.compute.manager [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Received event network-changed-bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 749.861173] env[62974]: DEBUG nova.compute.manager [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Refreshing instance network info cache due to event network-changed-bb305260-1683-4681-aea1-92b24514bf2b. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 749.861173] env[62974]: DEBUG oslo_concurrency.lockutils [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] Acquiring lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.861173] env[62974]: DEBUG oslo_concurrency.lockutils [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] Acquired lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.861173] env[62974]: DEBUG nova.network.neutron [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Refreshing network info cache for port bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.966020] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 750.132378] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654183, 'name': CloneVM_Task} progress is 95%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.189796] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654185, 'name': PowerOffVM_Task, 'duration_secs': 0.403278} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.190085] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.190294] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance '8621428e-cf42-47a4-82c8-a003c377b257' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 750.322641] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572155} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.322957] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.323234] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.323943] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28a95c47-cbeb-41f3-88f4-d2fb921c8c44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.338075] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 750.338075] env[62974]: value = "task-2654186" [ 750.338075] env[62974]: _type = "Task" [ 750.338075] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.355372] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654186, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.414050] env[62974]: DEBUG nova.compute.manager [None req-e3f1a683-8e38-46b8-9dda-eccd5fc13396 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 750.418869] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d516b30d-40d1-4f4a-9423-b468bab48f83 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.509477] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.635815] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654183, 'name': CloneVM_Task, 'duration_secs': 1.740996} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.640020] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56957e33-54a4-4f55-8686-0e2c141d25ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.641478] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Created linked-clone VM from snapshot [ 750.642503] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f92398-a35c-4818-8d5d-e35e4b6401f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.652482] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e550d05-2e50-485d-9f2a-79624df36e68 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.655850] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Uploading image ef61c836-da44-4806-95f8-83ec0dadbfaa {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 750.692045] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a049378b-9bfa-4b05-93e1-e65550e6a120 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.703128] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 750.703128] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 750.703128] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 750.703128] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 
tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 750.703366] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 750.703366] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 750.703366] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 750.703366] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 750.703366] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 750.703531] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 750.703531] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 750.714279] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 750.714279] env[62974]: value = "vm-535344" [ 750.714279] env[62974]: _type = "VirtualMachine" [ 750.714279] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 750.714279] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88225a7a-4b6a-4f00-9c9d-b2e461e8f5ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.722949] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba58b2b-6362-49b9-a332-67f0a4e170a2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.727392] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-110c1893-2426-471c-b676-85f27d661ffa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.729496] env[62974]: DEBUG nova.network.neutron [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Updated VIF entry in instance network info cache for port bb305260-1683-4681-aea1-92b24514bf2b. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.729835] env[62974]: DEBUG nova.network.neutron [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Updating instance_info_cache with network_info: [{"id": "bb305260-1683-4681-aea1-92b24514bf2b", "address": "fa:16:3e:53:14:12", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb305260-16", "ovs_interfaceid": "bb305260-1683-4681-aea1-92b24514bf2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.742629] env[62974]: DEBUG nova.compute.provider_tree [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.747021] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 750.747021] env[62974]: value = 
"task-2654187" [ 750.747021] env[62974]: _type = "Task" [ 750.747021] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.747021] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lease: (returnval){ [ 750.747021] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527553e4-65dc-4907-ce86-95c77935bd37" [ 750.747021] env[62974]: _type = "HttpNfcLease" [ 750.747021] env[62974]: } obtained for exporting VM: (result){ [ 750.747021] env[62974]: value = "vm-535344" [ 750.747021] env[62974]: _type = "VirtualMachine" [ 750.747021] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 750.747021] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the lease: (returnval){ [ 750.747021] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527553e4-65dc-4907-ce86-95c77935bd37" [ 750.747021] env[62974]: _type = "HttpNfcLease" [ 750.747021] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 750.757996] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 750.757996] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527553e4-65dc-4907-ce86-95c77935bd37" [ 750.757996] env[62974]: _type = "HttpNfcLease" [ 750.757996] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 750.763980] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 750.763980] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527553e4-65dc-4907-ce86-95c77935bd37" [ 750.763980] env[62974]: _type = "HttpNfcLease" [ 750.763980] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 750.764780] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.765581] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8ef10f-5c7e-4354-aab8-36f647bd2eee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.774533] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52be1b34-cd68-aeda-e152-6f3ce1cb74ce/disk-0.vmdk from lease info. 
{{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 750.774823] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52be1b34-cd68-aeda-e152-6f3ce1cb74ce/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 750.847799] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095679} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.848175] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.849064] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939fff47-ccd4-4f8b-a5fa-9b83588ee99f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.872066] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.874927] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1070e8be-3314-48d1-b75a-e52cf8743c5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.890107] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock "4967d5be-6cd4-4f23-aca4-d9ae11112369" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.890388] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "4967d5be-6cd4-4f23-aca4-d9ae11112369" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.890644] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock 
"4967d5be-6cd4-4f23-aca4-d9ae11112369-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.890804] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "4967d5be-6cd4-4f23-aca4-d9ae11112369-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.890978] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "4967d5be-6cd4-4f23-aca4-d9ae11112369-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.895332] env[62974]: INFO nova.compute.manager [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Terminating instance [ 750.899267] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 750.899267] env[62974]: value = "task-2654189" [ 750.899267] env[62974]: _type = "Task" [ 750.899267] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.907711] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9fcc027b-2dca-4b4e-ba26-c0f4c19a2d41 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.915724] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654189, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.931302] env[62974]: INFO nova.compute.manager [None req-e3f1a683-8e38-46b8-9dda-eccd5fc13396 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] instance snapshotting [ 750.932064] env[62974]: DEBUG nova.objects.instance [None req-e3f1a683-8e38-46b8-9dda-eccd5fc13396 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lazy-loading 'flavor' on Instance uuid 4967d5be-6cd4-4f23-aca4-d9ae11112369 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.933995] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [{"id": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "address": "fa:16:3e:34:87:aa", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b21ba-e0", "ovs_interfaceid": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.096231] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "e42547b0-25b7-4a34-b832-b93103065928" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.096645] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "e42547b0-25b7-4a34-b832-b93103065928" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.233177] env[62974]: DEBUG oslo_concurrency.lockutils [req-476215e5-d755-494d-aea5-079f1b065489 req-12475189-dffc-4c23-8da6-f7d5ea8d73aa service nova] Releasing lock "refresh_cache-69fb00b3-6a41-4ef5-8876-6548cae31c07" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.248584] env[62974]: DEBUG nova.scheduler.client.report [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.261109] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654187, 'name': ReconfigVM_Task, 'duration_secs': 0.252641} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.261470] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance '8621428e-cf42-47a4-82c8-a003c377b257' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 751.404035] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock "refresh_cache-4967d5be-6cd4-4f23-aca4-d9ae11112369" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.404035] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquired lock "refresh_cache-4967d5be-6cd4-4f23-aca4-d9ae11112369" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.404035] env[62974]: DEBUG nova.network.neutron [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.415791] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654189, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.444724] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.444948] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 751.447039] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.447294] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.447486] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.448191] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.449060] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fdb635-95fa-4081-aa4c-8539a167a9fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.452163] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.452736] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.452933] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 751.453559] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.469127] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ae624a-efa9-4df3-a284-d667dafd2cac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.650278] env[62974]: INFO nova.compute.manager [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Rescuing [ 751.650696] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.651124] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.651849] env[62974]: DEBUG nova.network.neutron [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.756735] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.869s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.757405] env[62974]: DEBUG nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 751.762082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.368s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.762600] env[62974]: INFO nova.compute.claims [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.769856] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:52:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c356ba03-298c-489b-984a-f2eae32bbcc6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1135043868',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.770168] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.770552] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.771676] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.771676] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.771676] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.771676] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 
tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.771873] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.772116] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.772528] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.772832] env[62974]: DEBUG nova.virt.hardware [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.778711] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfiguring VM instance instance-00000029 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 751.780080] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23686241-5bf9-4b27-9cf5-1a8e51daa0b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.803244] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 751.803244] env[62974]: value = "task-2654190" [ 751.803244] env[62974]: _type = "Task" [ 751.803244] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.814509] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654190, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.916727] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654189, 'name': ReconfigVM_Task, 'duration_secs': 0.629452} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.917272] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.918637] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3386fafa-4a7b-43fa-b669-df4c04125ee1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.926199] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 751.926199] env[62974]: value = "task-2654191" [ 751.926199] env[62974]: _type = "Task" [ 751.926199] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.937394] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654191, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.938368] env[62974]: DEBUG nova.network.neutron [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.956704] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.981483] env[62974]: DEBUG nova.compute.manager [None req-e3f1a683-8e38-46b8-9dda-eccd5fc13396 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Instance disappeared during snapshot {{(pid=62974) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 752.002646] env[62974]: DEBUG nova.network.neutron [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.128186] env[62974]: DEBUG nova.compute.manager [None req-e3f1a683-8e38-46b8-9dda-eccd5fc13396 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Found 0 images (rotation: 2) {{(pid=62974) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 752.269526] env[62974]: DEBUG nova.compute.utils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 752.271041] env[62974]: DEBUG nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.271727] env[62974]: DEBUG nova.network.neutron [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 752.316181] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654190, 'name': ReconfigVM_Task, 'duration_secs': 0.220246} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.316181] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfigured VM instance instance-00000029 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 752.317011] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9469ab1f-35e0-4ee6-97fd-bd7656a3cf28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.342326] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.346795] env[62974]: DEBUG nova.policy [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8c9920787544ba19e573786bea455c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '224eb10f2811439d8593c7ebfbad908d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 752.348273] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cef043c-29ff-40f3-ba83-6a6260d00b97 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.369982] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 752.369982] env[62974]: value = "task-2654192" [ 752.369982] env[62974]: _type = "Task" [ 752.369982] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.378130] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654192, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.438738] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654191, 'name': Rename_Task, 'duration_secs': 0.221513} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.438738] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.438738] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b58b131c-de67-45e7-bf02-44afea2a6638 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.443378] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 752.443378] env[62974]: value = "task-2654193" [ 752.443378] env[62974]: _type = "Task" [ 752.443378] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.453155] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654193, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.467043] env[62974]: DEBUG nova.network.neutron [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Updating instance_info_cache with network_info: [{"id": "0576c111-5b07-4ceb-be4b-78e565bd0313", "address": "fa:16:3e:3f:7d:6e", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0576c111-5b", "ovs_interfaceid": "0576c111-5b07-4ceb-be4b-78e565bd0313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.511237] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Releasing lock "refresh_cache-4967d5be-6cd4-4f23-aca4-d9ae11112369" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.511962] env[62974]: DEBUG 
nova.compute.manager [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 752.513194] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 752.514844] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4c315a-6b78-431b-82f6-a96149167aab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.526892] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 752.529748] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9446439f-17bb-4073-aeef-fc65cd8e1b74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.538784] env[62974]: DEBUG oslo_vmware.api [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 752.538784] env[62974]: value = "task-2654194" [ 752.538784] env[62974]: _type = "Task" [ 752.538784] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.555584] env[62974]: DEBUG oslo_vmware.api [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.699814] env[62974]: DEBUG nova.network.neutron [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Successfully created port: 975d472e-a9c2-416f-9c30-6d3563f96445 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.778643] env[62974]: DEBUG nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 752.882500] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654192, 'name': ReconfigVM_Task, 'duration_secs': 0.472203} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.882665] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.883042] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance '8621428e-cf42-47a4-82c8-a003c377b257' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 752.957153] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654193, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.971926] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.048752] env[62974]: DEBUG oslo_vmware.api [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654194, 'name': PowerOffVM_Task, 'duration_secs': 0.141557} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.052024] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 753.052024] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 753.052024] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31a785b3-7c70-4de4-b047-08caff823f47 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.078641] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 753.078865] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.079185] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Deleting the datastore file [datastore2] 4967d5be-6cd4-4f23-aca4-d9ae11112369 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.079812] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3d994c2-86b8-4b64-a261-181a4bda2843 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.088031] env[62974]: DEBUG oslo_vmware.api [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for the task: (returnval){ [ 753.088031] env[62974]: value = "task-2654196" [ 753.088031] env[62974]: _type = "Task" [ 753.088031] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.099407] env[62974]: DEBUG oslo_vmware.api [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654196, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.390767] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25777c5c-a2a9-442f-8f70-dc3e57489d60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.415921] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c0faed-f750-440f-b291-bb45f46f5ec4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.419417] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddce679-5e6b-477f-b7a6-561286fb0177 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.439241] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance '8621428e-cf42-47a4-82c8-a003c377b257' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 753.445622] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7276d3f-44f5-4445-9bad-ccdd7d47d4f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.458517] env[62974]: DEBUG oslo_vmware.api [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654193, 'name': PowerOnVM_Task, 'duration_secs': 0.94205} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.483079] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.483366] env[62974]: INFO nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Took 8.68 seconds to spawn the instance on the hypervisor. 
[ 753.483551] env[62974]: DEBUG nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.485558] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4329f957-3ba0-46bc-bbe7-c638879d1dcc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.488753] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41a162a-6a80-4154-9b0a-93c9d1559be5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.499979] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165d326a-c689-4575-a767-d82f8f29c42b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.519792] env[62974]: DEBUG nova.compute.provider_tree [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.598369] env[62974]: DEBUG oslo_vmware.api [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Task: {'id': task-2654196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186028} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.598641] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.598841] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 753.599036] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.599237] env[62974]: INFO nova.compute.manager [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Took 1.09 seconds to destroy the instance on the hypervisor. 
[ 753.599476] env[62974]: DEBUG oslo.service.loopingcall [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.599668] env[62974]: DEBUG nova.compute.manager [-] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 753.599752] env[62974]: DEBUG nova.network.neutron [-] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.616130] env[62974]: DEBUG nova.network.neutron [-] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.788433] env[62974]: DEBUG nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 753.810465] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 753.810713] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.810867] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 753.811056] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 753.811215] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 753.811370] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 753.811570] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 753.811742] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 753.811907] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 753.812079] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 753.812275] env[62974]: DEBUG nova.virt.hardware [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 753.813195] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69e78ca-cb0c-4638-9ceb-67ab4f304b61 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.820949] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d395a6b8-162a-4e81-a528-a0a67e9afbb2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.981451] env[62974]: DEBUG nova.network.neutron [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Port 3b60d221-2cab-4e30-8892-d139b511ccc1 binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start 
/opt/stack/nova/nova/network/neutron.py:3228}} [ 754.019732] env[62974]: INFO nova.compute.manager [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Took 42.64 seconds to build instance. [ 754.025202] env[62974]: DEBUG nova.scheduler.client.report [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 754.119676] env[62974]: DEBUG nova.network.neutron [-] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.348179] env[62974]: DEBUG nova.compute.manager [req-a0f81dbf-df3a-470c-bed4-d63e7f763a37 req-0467c0c4-40e0-411c-abd4-ed516188f999 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Received event network-vif-plugged-975d472e-a9c2-416f-9c30-6d3563f96445 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 754.348179] env[62974]: DEBUG oslo_concurrency.lockutils [req-a0f81dbf-df3a-470c-bed4-d63e7f763a37 req-0467c0c4-40e0-411c-abd4-ed516188f999 service nova] Acquiring lock "cf6e4f04-f5f4-46cb-884b-8014af903a10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.348179] env[62974]: DEBUG oslo_concurrency.lockutils [req-a0f81dbf-df3a-470c-bed4-d63e7f763a37 req-0467c0c4-40e0-411c-abd4-ed516188f999 service nova] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.348179] env[62974]: DEBUG oslo_concurrency.lockutils [req-a0f81dbf-df3a-470c-bed4-d63e7f763a37 req-0467c0c4-40e0-411c-abd4-ed516188f999 service nova] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.348179] env[62974]: DEBUG nova.compute.manager [req-a0f81dbf-df3a-470c-bed4-d63e7f763a37 req-0467c0c4-40e0-411c-abd4-ed516188f999 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] No waiting events found dispatching network-vif-plugged-975d472e-a9c2-416f-9c30-6d3563f96445 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 754.348570] env[62974]: WARNING nova.compute.manager [req-a0f81dbf-df3a-470c-bed4-d63e7f763a37 req-0467c0c4-40e0-411c-abd4-ed516188f999 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Received unexpected event 
network-vif-plugged-975d472e-a9c2-416f-9c30-6d3563f96445 for instance with vm_state building and task_state spawning. [ 754.450994] env[62974]: DEBUG nova.network.neutron [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Successfully updated port: 975d472e-a9c2-416f-9c30-6d3563f96445 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 754.521678] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.521992] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f473316e-6d96-48a3-b248-ac88ee4b0196 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.527324] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2cc229-f1f5-49a2-9072-e913803fc7d4 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.819s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.528014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.767s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.528564] env[62974]: DEBUG nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 754.533575] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.654s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.535296] env[62974]: INFO nova.compute.claims [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 754.538616] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 754.538616] env[62974]: value = "task-2654197" [ 754.538616] env[62974]: _type = "Task" [ 754.538616] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.548220] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654197, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.621430] env[62974]: INFO nova.compute.manager [-] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Took 1.02 seconds to deallocate network for instance. 
[ 754.953071] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "refresh_cache-cf6e4f04-f5f4-46cb-884b-8014af903a10" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.953431] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquired lock "refresh_cache-cf6e4f04-f5f4-46cb-884b-8014af903a10" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.953431] env[62974]: DEBUG nova.network.neutron [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 755.004309] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "8621428e-cf42-47a4-82c8-a003c377b257-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.004781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.004781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.034143] env[62974]: DEBUG nova.compute.utils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.035530] env[62974]: DEBUG nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 755.035697] env[62974]: DEBUG nova.network.neutron [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 755.040143] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.060534] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654197, 'name': PowerOffVM_Task, 'duration_secs': 0.357758} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.061078] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.062220] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9e45d5-d05d-4a9a-a14f-37dbbf6a0c1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.086958] env[62974]: DEBUG nova.policy [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35fbf7e6197b4a5eb3e59e2d7dcb42a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a095f717f7d4c1e81311a0810eed958', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 755.089238] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9945521-fed7-4e4b-91fa-593545da9646 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.127707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.131802] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 
3426d512-d54e-4852-8eca-8ba9f5fef418] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.132192] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc19671c-7fc4-45c1-ad05-d317ec61cf9f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.140231] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 755.140231] env[62974]: value = "task-2654198" [ 755.140231] env[62974]: _type = "Task" [ 755.140231] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.150821] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 755.151253] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 755.151628] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.151813] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.152046] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.152333] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-936d6d66-2276-44e5-b0f2-e2bff12513ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.161550] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.161751] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 755.162541] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-081a032c-9e90-412b-bc0a-4c4b692350b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.168738] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 755.168738] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5266549e-05d6-44c8-675f-ec0db3460052" [ 755.168738] env[62974]: _type = "Task" [ 755.168738] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.177421] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5266549e-05d6-44c8-675f-ec0db3460052, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.401661] env[62974]: DEBUG nova.network.neutron [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Successfully created port: 1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.485383] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "65615fd7-c219-4c19-8ecf-11336b616ead" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.485820] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.487487] env[62974]: DEBUG nova.compute.manager [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 755.488263] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38983e4f-d0bb-46f9-8848-5d4e95fa5342 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.496769] env[62974]: DEBUG nova.compute.manager [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 755.497578] env[62974]: DEBUG nova.objects.instance [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'flavor' on Instance uuid 65615fd7-c219-4c19-8ecf-11336b616ead {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.506905] env[62974]: DEBUG nova.network.neutron [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.538747] env[62974]: DEBUG nova.compute.utils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.573645] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.682313] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5266549e-05d6-44c8-675f-ec0db3460052, 'name': SearchDatastore_Task, 'duration_secs': 0.015276} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.688179] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f20019cf-862b-425c-94d3-f3d7148a1ed3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.694250] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 755.694250] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520abc4d-0192-b38f-d643-6fce503b5fc9" [ 755.694250] env[62974]: _type = "Task" [ 755.694250] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.702209] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520abc4d-0192-b38f-d643-6fce503b5fc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.753119] env[62974]: DEBUG nova.network.neutron [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Updating instance_info_cache with network_info: [{"id": "975d472e-a9c2-416f-9c30-6d3563f96445", "address": "fa:16:3e:3d:d6:bd", "network": {"id": "90979921-9f16-4193-8e04-4b81286829be", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1061947455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "224eb10f2811439d8593c7ebfbad908d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap975d472e-a9", "ovs_interfaceid": "975d472e-a9c2-416f-9c30-6d3563f96445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.043435] env[62974]: DEBUG nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 756.059199] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.059398] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.059575] env[62974]: DEBUG nova.network.neutron [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.116661] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8331fd7f-0279-400a-ba32-7267a75dedbe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.124915] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218d0375-6cf4-4e6d-bbd3-c0dea620fdea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.155217] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35658425-55ca-47b1-8487-a4408609e78a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.162916] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8326b0-979d-4b1d-b26d-24f4bf607634 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.176403] env[62974]: DEBUG nova.compute.provider_tree [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.204338] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520abc4d-0192-b38f-d643-6fce503b5fc9, 'name': SearchDatastore_Task, 'duration_secs': 0.01867} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.204598] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.204860] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. {{(pid=62974) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 756.205135] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a16271c-0ed8-4d3b-8fc4-8efa3fe70570 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.211796] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 756.211796] env[62974]: value = "task-2654199" [ 756.211796] env[62974]: _type = "Task" [ 756.211796] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.219735] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654199, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.260170] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Releasing lock "refresh_cache-cf6e4f04-f5f4-46cb-884b-8014af903a10" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.260557] env[62974]: DEBUG nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Instance network_info: |[{"id": "975d472e-a9c2-416f-9c30-6d3563f96445", "address": "fa:16:3e:3d:d6:bd", "network": {"id": "90979921-9f16-4193-8e04-4b81286829be", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1061947455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "224eb10f2811439d8593c7ebfbad908d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap975d472e-a9", "ovs_interfaceid": "975d472e-a9c2-416f-9c30-6d3563f96445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 756.260998] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:d6:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba0caf51-f398-43a4-b2b3-f53480254d5f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '975d472e-a9c2-416f-9c30-6d3563f96445', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.268979] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Creating folder: Project (224eb10f2811439d8593c7ebfbad908d). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.269473] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-772bca72-f245-4e0c-adc5-0b69af011a50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.280661] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Created folder: Project (224eb10f2811439d8593c7ebfbad908d) in parent group-v535199. [ 756.280876] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Creating folder: Instances. Parent ref: group-v535345. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.281215] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5455190-fdf4-425b-98db-5ebfba84fb62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.290169] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Created folder: Instances in parent group-v535345. [ 756.290410] env[62974]: DEBUG oslo.service.loopingcall [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.290602] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.290800] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8210584-9530-4b30-92cc-62438180fb23 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.309240] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.309240] env[62974]: value = "task-2654202" [ 756.309240] env[62974]: _type = "Task" [ 756.309240] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.318652] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654202, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.386933] env[62974]: DEBUG nova.compute.manager [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Received event network-changed-975d472e-a9c2-416f-9c30-6d3563f96445 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 756.386933] env[62974]: DEBUG nova.compute.manager [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Refreshing instance network info cache due to event network-changed-975d472e-a9c2-416f-9c30-6d3563f96445. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 756.387048] env[62974]: DEBUG oslo_concurrency.lockutils [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] Acquiring lock "refresh_cache-cf6e4f04-f5f4-46cb-884b-8014af903a10" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.387187] env[62974]: DEBUG oslo_concurrency.lockutils [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] Acquired lock "refresh_cache-cf6e4f04-f5f4-46cb-884b-8014af903a10" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.387348] env[62974]: DEBUG nova.network.neutron [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Refreshing network info cache for port 975d472e-a9c2-416f-9c30-6d3563f96445 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 756.510650] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 756.510970] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a17767a-ee44-4d7e-9a85-d03e0f1d0e15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.518434] env[62974]: DEBUG oslo_vmware.api [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 756.518434] env[62974]: value = "task-2654203" [ 756.518434] env[62974]: _type = "Task" [ 756.518434] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.527156] env[62974]: DEBUG oslo_vmware.api [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654203, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.680316] env[62974]: DEBUG nova.scheduler.client.report [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 756.722915] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654199, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.772975] env[62974]: DEBUG nova.network.neutron [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance_info_cache with network_info: [{"id": "3b60d221-2cab-4e30-8892-d139b511ccc1", "address": "fa:16:3e:80:cf:bd", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b60d221-2c", "ovs_interfaceid": "3b60d221-2cab-4e30-8892-d139b511ccc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.819804] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654202, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.028607] env[62974]: DEBUG oslo_vmware.api [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654203, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.053881] env[62974]: DEBUG nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 757.083759] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:50:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='263301372',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-918995055',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 757.084895] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.085111] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 757.085311] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.085461] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 757.085609] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 757.085817] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 757.085977] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 757.086278] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 757.086473] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 757.086656] env[62974]: DEBUG nova.virt.hardware [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 757.087543] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea8f441-e98a-4404-b906-75759ef7f2a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.102597] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7d8816-2b7f-4884-869b-7ad59fcd4943 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.186302] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.187015] env[62974]: DEBUG nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 757.191808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.329s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.192128] env[62974]: DEBUG nova.objects.instance [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lazy-loading 'resources' on Instance uuid 669cd72c-556f-40b6-8bc2-f50a125c182a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 757.203848] env[62974]: DEBUG nova.network.neutron [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Successfully updated port: 1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.224269] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654199, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.7269} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.224533] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. [ 757.225313] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66117d2-58d8-4303-8a28-8cd51e1930b0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.254265] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.255383] env[62974]: DEBUG nova.network.neutron [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Updated VIF entry in instance network info cache for port 975d472e-a9c2-416f-9c30-6d3563f96445. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 757.255727] env[62974]: DEBUG nova.network.neutron [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Updating instance_info_cache with network_info: [{"id": "975d472e-a9c2-416f-9c30-6d3563f96445", "address": "fa:16:3e:3d:d6:bd", "network": {"id": "90979921-9f16-4193-8e04-4b81286829be", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1061947455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "224eb10f2811439d8593c7ebfbad908d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba0caf51-f398-43a4-b2b3-f53480254d5f", "external-id": "nsx-vlan-transportzone-667", "segmentation_id": 667, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap975d472e-a9", "ovs_interfaceid": "975d472e-a9c2-416f-9c30-6d3563f96445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.257096] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-411ba207-f83c-46cd-9dd9-730b9b0ffacf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.275481] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.281265] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 757.281265] env[62974]: value = "task-2654204" [ 757.281265] env[62974]: _type = "Task" [ 757.281265] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.290011] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654204, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.319543] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654202, 'name': CreateVM_Task, 'duration_secs': 0.825188} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.319623] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.320387] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.320557] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.320872] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 757.321196] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1a01aab-d80a-47dd-8b8e-f8e62391d856 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.326349] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 757.326349] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bd0c1d-fc42-4f31-69ae-19853ffadbe1" [ 757.326349] env[62974]: _type = "Task" [ 757.326349] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.335025] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bd0c1d-fc42-4f31-69ae-19853ffadbe1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.532034] env[62974]: DEBUG oslo_vmware.api [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654203, 'name': PowerOffVM_Task, 'duration_secs': 0.561011} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.532407] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 757.532754] env[62974]: DEBUG nova.compute.manager [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.533846] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d53208-9b83-482c-97b1-dbcef2f5432d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.698763] env[62974]: DEBUG nova.compute.utils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 757.700877] env[62974]: DEBUG nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 757.701097] env[62974]: DEBUG nova.network.neutron [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 757.707803] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.707803] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.707933] env[62974]: DEBUG nova.network.neutron [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.740054] env[62974]: DEBUG nova.policy [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Policy check for network:attach_external_network failed with credentials 
{'is_admin': False, 'user_id': 'a86bbc98ec50467792b3c6a6cedc790b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14dd4a9a77ad40458d40bb82ac4b90a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 757.771776] env[62974]: DEBUG oslo_concurrency.lockutils [req-ab68abf0-6b43-4205-882e-b317a9c146b0 req-68c1db4e-255c-42d3-a404-a718f47d4342 service nova] Releasing lock "refresh_cache-cf6e4f04-f5f4-46cb-884b-8014af903a10" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.799479] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654204, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.804692] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a90816-f44a-437c-b85e-effe3b180b8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.828454] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24140d29-2232-46c4-a3c4-b111a311c7dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.839060] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance '8621428e-cf42-47a4-82c8-a003c377b257' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 757.849470] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bd0c1d-fc42-4f31-69ae-19853ffadbe1, 'name': SearchDatastore_Task, 'duration_secs': 0.024569} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.850302] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.850564] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.850803] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.850979] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.851203] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.851468] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa30814b-9ce3-4deb-8abd-c6704d90bac0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.863010] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.863202] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.864045] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb10d0cf-66d8-4be9-8f4d-b086a6b06354 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.869659] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 757.869659] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6aca2-82e8-6c64-fbed-688e56e8ac0c" [ 757.869659] env[62974]: _type = "Task" [ 757.869659] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.880077] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6aca2-82e8-6c64-fbed-688e56e8ac0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.030099] env[62974]: DEBUG nova.network.neutron [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Successfully created port: 331712b7-2ae7-4199-a2b0-e7b880a332e8 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.047780] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d204ed3-e3ad-43d5-87db-bac3f38fdded tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.562s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.204252] env[62974]: DEBUG nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 758.260363] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d51e63-85c6-4def-b835-48583025f744 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.266748] env[62974]: DEBUG nova.network.neutron [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.274194] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd6c980-0ad9-4b84-b3d4-f9565e4c58b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.310739] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6d5c2c-0db8-426e-9f3f-8396e961a6d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.321469] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ae4091-64d5-42c0-bbd3-0fe7b0cb7fdc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.325232] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654204, 'name': ReconfigVM_Task, 'duration_secs': 0.575168} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.325519] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.327658] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7efe488-0546-467c-844b-9c4917f116ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.337067] env[62974]: DEBUG nova.compute.provider_tree [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.363735] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 758.366569] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a41737b-834b-4443-9fa7-e13129937ce8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.368125] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d911c02-5c3e-4737-9428-c83110969582 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.384335] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 
tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 758.384335] env[62974]: value = "task-2654205" [ 758.384335] env[62974]: _type = "Task" [ 758.384335] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.392083] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6aca2-82e8-6c64-fbed-688e56e8ac0c, 'name': SearchDatastore_Task, 'duration_secs': 0.02104} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.392393] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 758.392393] env[62974]: value = "task-2654206" [ 758.392393] env[62974]: _type = "Task" [ 758.392393] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.393548] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a28ea71-0394-4898-abf7-3318478004bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.404559] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654205, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.407927] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 758.407927] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52769ab4-06e5-540a-55d2-247dc2ec91d3" [ 758.407927] env[62974]: _type = "Task" [ 758.407927] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.410737] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654206, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.418941] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52769ab4-06e5-540a-55d2-247dc2ec91d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.459460] env[62974]: DEBUG nova.compute.manager [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Received event network-vif-plugged-1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 758.459730] env[62974]: DEBUG oslo_concurrency.lockutils [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] Acquiring lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.460055] env[62974]: DEBUG oslo_concurrency.lockutils [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.460300] env[62974]: DEBUG oslo_concurrency.lockutils [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.460512] env[62974]: DEBUG nova.compute.manager [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] No waiting events found dispatching network-vif-plugged-1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 758.460774] env[62974]: WARNING nova.compute.manager [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Received unexpected event network-vif-plugged-1a9b17b6-73d0-4a26-aeb4-00390730c3b0 for instance with vm_state building and task_state spawning. [ 758.461014] env[62974]: DEBUG nova.compute.manager [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Received event network-changed-1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 758.461434] env[62974]: DEBUG nova.compute.manager [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Refreshing instance network info cache due to event network-changed-1a9b17b6-73d0-4a26-aeb4-00390730c3b0. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 758.461505] env[62974]: DEBUG oslo_concurrency.lockutils [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] Acquiring lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.480576] env[62974]: DEBUG nova.network.neutron [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Updating instance_info_cache with network_info: [{"id": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "address": "fa:16:3e:86:24:d9", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a9b17b6-73", "ovs_interfaceid": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.841074] env[62974]: DEBUG nova.scheduler.client.report [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.895015] env[62974]: DEBUG oslo_vmware.api [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654205, 'name': PowerOnVM_Task, 'duration_secs': 0.476851} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.898516] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.898750] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2f0ae0-bd73-4196-ad07-8737852b4b60 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance '8621428e-cf42-47a4-82c8-a003c377b257' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 758.907666] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654206, 'name': ReconfigVM_Task, 'duration_secs': 0.312138} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.908182] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 758.908264] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c021aef-8ce9-437d-aa4e-2cb0df171906 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.916133] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 758.916133] env[62974]: value = "task-2654207" [ 758.916133] env[62974]: _type = "Task" [ 758.916133] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.923618] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52769ab4-06e5-540a-55d2-247dc2ec91d3, 'name': SearchDatastore_Task, 'duration_secs': 0.013658} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.924341] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.924684] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] cf6e4f04-f5f4-46cb-884b-8014af903a10/cf6e4f04-f5f4-46cb-884b-8014af903a10.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 758.924961] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41a79f8e-3108-40e1-9f6e-44d5d12fbf09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.931402] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.936324] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 758.936324] env[62974]: value = "task-2654208" [ 758.936324] env[62974]: _type = "Task" [ 758.936324] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.947543] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654208, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.982937] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Releasing lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.983294] env[62974]: DEBUG nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Instance network_info: |[{"id": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "address": "fa:16:3e:86:24:d9", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a9b17b6-73", "ovs_interfaceid": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 758.983609] env[62974]: DEBUG oslo_concurrency.lockutils [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] Acquired lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.983789] env[62974]: DEBUG nova.network.neutron [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Refreshing network info cache for port 1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.985020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:24:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5116f690-f825-4fee-8a47-42b073e716c5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a9b17b6-73d0-4a26-aeb4-00390730c3b0', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.994056] env[62974]: DEBUG oslo.service.loopingcall [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c 
tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.995190] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 758.995422] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83771eb8-b218-4697-b917-a64ab76f31af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.017463] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.017463] env[62974]: value = "task-2654209" [ 759.017463] env[62974]: _type = "Task" [ 759.017463] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.028658] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654209, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.214832] env[62974]: DEBUG nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 759.237749] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 759.237969] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.238211] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 759.238413] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] 
Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.238560] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 759.238787] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 759.238956] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 759.239155] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 759.239416] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 759.239509] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 759.239627] env[62974]: DEBUG nova.virt.hardware [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 759.240584] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f069753-5229-4c93-b7d6-1e42c0b2a7c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.250751] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47887046-5371-453c-9630-5a22eabf5013 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.271993] env[62974]: INFO nova.compute.manager [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Rebuilding instance [ 759.320642] env[62974]: DEBUG nova.compute.manager [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 
65615fd7-c219-4c19-8ecf-11336b616ead] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.321363] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cec4081-c5c7-4fd8-a185-d34a6a6783dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.347800] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.350951] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.841s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.351973] env[62974]: INFO nova.compute.claims [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.370784] env[62974]: INFO nova.scheduler.client.report [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted allocations for instance 669cd72c-556f-40b6-8bc2-f50a125c182a [ 759.428239] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654207, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.447121] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654208, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.528492] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654209, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.764503] env[62974]: DEBUG nova.network.neutron [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Successfully updated port: 331712b7-2ae7-4199-a2b0-e7b880a332e8 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 759.788892] env[62974]: DEBUG nova.network.neutron [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Updated VIF entry in instance network info cache for port 1a9b17b6-73d0-4a26-aeb4-00390730c3b0. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.789407] env[62974]: DEBUG nova.network.neutron [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Updating instance_info_cache with network_info: [{"id": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "address": "fa:16:3e:86:24:d9", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a9b17b6-73", "ovs_interfaceid": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.877863] env[62974]: DEBUG oslo_concurrency.lockutils [None req-227456b3-71cb-4e80-807d-fe9985ba9e1c tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "669cd72c-556f-40b6-8bc2-f50a125c182a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.736s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.928070] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654207, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.946995] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.789872} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.946995] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] cf6e4f04-f5f4-46cb-884b-8014af903a10/cf6e4f04-f5f4-46cb-884b-8014af903a10.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.947374] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.947512] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a4c459b-d3fa-4e3c-987e-3c33da8ea2a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.954623] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 759.954623] env[62974]: value = "task-2654210" [ 759.954623] env[62974]: _type = "Task" [ 759.954623] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.967183] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.028841] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654209, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.267024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-3bcbcf35-294e-4d58-b002-cb84db4316d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.267576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-3bcbcf35-294e-4d58-b002-cb84db4316d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.267576] env[62974]: DEBUG nova.network.neutron [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.292299] env[62974]: DEBUG oslo_concurrency.lockutils [req-98f12d76-26b6-400b-8ae4-e7cbac27a5b5 req-2581cb62-a3f5-4449-a8fa-764a2e51e9f5 service nova] Releasing lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.338234] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 760.338582] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18100c5b-4d3d-419e-b9f8-500d8511cca9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.345972] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 760.345972] env[62974]: value = "task-2654211" [ 760.345972] env[62974]: _type = "Task" [ 760.345972] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.354630] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 760.354872] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 760.355606] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451a34d6-bf0b-449b-bd52-d4c9a91144a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.364370] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.364761] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76d7d9bf-ced7-4f52-bbaf-8b224a9ef600 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.429695] env[62974]: DEBUG oslo_vmware.api [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654207, 'name': PowerOnVM_Task, 'duration_secs': 1.050118} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.432536] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 760.436138] env[62974]: DEBUG nova.compute.manager [None req-20c5ddbc-56d5-4158-a543-092e05eb9544 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 760.437124] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb7dc7b-6dae-442a-85aa-38bfbfe2d0c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.467119] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063771} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.467397] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.468202] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981e55a2-8876-4033-972e-0ddfe1bbaa9a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.493760] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] cf6e4f04-f5f4-46cb-884b-8014af903a10/cf6e4f04-f5f4-46cb-884b-8014af903a10.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.497294] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-884ca5ff-ec6d-4829-b867-e80d924acbbb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.517835] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 760.517835] env[62974]: value = "task-2654213" [ 760.517835] env[62974]: _type = "Task" [ 760.517835] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.532341] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654213, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.534557] env[62974]: DEBUG nova.compute.manager [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Received event network-vif-plugged-331712b7-2ae7-4199-a2b0-e7b880a332e8 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 760.534769] env[62974]: DEBUG oslo_concurrency.lockutils [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] Acquiring lock "3bcbcf35-294e-4d58-b002-cb84db4316d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.534977] env[62974]: DEBUG oslo_concurrency.lockutils [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.535122] env[62974]: DEBUG oslo_concurrency.lockutils [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.535295] env[62974]: DEBUG nova.compute.manager [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] No waiting events found dispatching network-vif-plugged-331712b7-2ae7-4199-a2b0-e7b880a332e8 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 760.535455] env[62974]: WARNING nova.compute.manager [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Received unexpected event network-vif-plugged-331712b7-2ae7-4199-a2b0-e7b880a332e8 for instance with vm_state building and task_state spawning. [ 760.535610] env[62974]: DEBUG nova.compute.manager [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Received event network-changed-331712b7-2ae7-4199-a2b0-e7b880a332e8 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 760.535783] env[62974]: DEBUG nova.compute.manager [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Refreshing instance network info cache due to event network-changed-331712b7-2ae7-4199-a2b0-e7b880a332e8. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 760.536039] env[62974]: DEBUG oslo_concurrency.lockutils [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] Acquiring lock "refresh_cache-3bcbcf35-294e-4d58-b002-cb84db4316d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.539604] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654209, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.816689] env[62974]: DEBUG nova.network.neutron [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.924322] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6dc538-6411-4f23-86fb-d1f975841c46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.934928] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09570e39-90ac-49fc-99bb-8dd76b947b6e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.971253] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2cf218-812d-4637-94c6-ed5afa4262d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.976937] env[62974]: DEBUG nova.network.neutron [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Updating instance_info_cache with network_info: [{"id": "331712b7-2ae7-4199-a2b0-e7b880a332e8", "address": "fa:16:3e:f9:27:a0", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap331712b7-2a", "ovs_interfaceid": "331712b7-2ae7-4199-a2b0-e7b880a332e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.985197] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afae811-2b3d-4e69-80eb-4877e69d1e5f 
{{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.002432] env[62974]: DEBUG nova.compute.provider_tree [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.031613] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654213, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.035275] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654209, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.189821] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52be1b34-cd68-aeda-e152-6f3ce1cb74ce/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 761.191331] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3987739a-2c45-4d6b-bece-d8aa9c53237a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.200283] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52be1b34-cd68-aeda-e152-6f3ce1cb74ce/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 761.200617] env[62974]: ERROR oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52be1b34-cd68-aeda-e152-6f3ce1cb74ce/disk-0.vmdk due to incomplete transfer. [ 761.200985] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-99c0bfd2-d70f-4339-886d-769c105257b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.209099] env[62974]: DEBUG oslo_vmware.rw_handles [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52be1b34-cd68-aeda-e152-6f3ce1cb74ce/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 761.209420] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Uploaded image ef61c836-da44-4806-95f8-83ec0dadbfaa to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 761.212756] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 761.213686] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-25ca3859-cbe1-4f6a-8590-89d254402244 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.220263] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 761.220263] env[62974]: value = "task-2654214" [ 761.220263] env[62974]: _type = "Task" [ 761.220263] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.228642] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654214, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.480364] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-3bcbcf35-294e-4d58-b002-cb84db4316d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.481935] env[62974]: DEBUG nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Instance network_info: |[{"id": "331712b7-2ae7-4199-a2b0-e7b880a332e8", "address": "fa:16:3e:f9:27:a0", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap331712b7-2a", "ovs_interfaceid": "331712b7-2ae7-4199-a2b0-e7b880a332e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 761.481935] env[62974]: DEBUG oslo_concurrency.lockutils [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] Acquired lock "refresh_cache-3bcbcf35-294e-4d58-b002-cb84db4316d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.482179] env[62974]: DEBUG nova.network.neutron [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Refreshing network info cache for port 331712b7-2ae7-4199-a2b0-e7b880a332e8 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.484145] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:27:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '331712b7-2ae7-4199-a2b0-e7b880a332e8', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 761.491344] env[62974]: DEBUG oslo.service.loopingcall [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 761.492070] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 761.492332] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fce8c14c-b6e5-40bc-8d43-c6904fa34f66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.509437] env[62974]: DEBUG nova.network.neutron [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Port 3b60d221-2cab-4e30-8892-d139b511ccc1 binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 761.509706] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.509849] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.510017] env[62974]: DEBUG nova.network.neutron [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.511765] env[62974]: DEBUG nova.scheduler.client.report [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 761.521073] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 761.521073] env[62974]: value = "task-2654215" [ 761.521073] env[62974]: _type = "Task" [ 761.521073] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.536812] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654213, 'name': ReconfigVM_Task, 'duration_secs': 0.600331} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.542942] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Reconfigured VM instance instance-00000034 to attach disk [datastore1] cf6e4f04-f5f4-46cb-884b-8014af903a10/cf6e4f04-f5f4-46cb-884b-8014af903a10.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.543572] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654215, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.544037] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654209, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.544250] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3c5677d-f4fa-4e1f-9f7f-8c638f50d126 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.554349] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 761.554349] env[62974]: value = "task-2654216" [ 761.554349] env[62974]: _type = "Task" [ 761.554349] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.563866] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654216, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.686587] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 761.686850] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 761.687062] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleting the datastore file [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 761.687783] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45d511cd-4c66-4f88-9830-dcc966b3d088 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.694398] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 761.694398] env[62974]: value = "task-2654217" [ 761.694398] env[62974]: _type = "Task" [ 761.694398] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.704101] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.730954] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654214, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.017169] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.017541] env[62974]: DEBUG nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 762.022840] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.274s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.024403] env[62974]: INFO nova.compute.claims [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.041335] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654215, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.045751] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654209, 'name': CreateVM_Task, 'duration_secs': 2.753085} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.045751] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 762.045751] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.045751] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.046352] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 762.046352] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98502e35-7ed4-45c8-8f41-de9c2411d5d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.051034] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 762.051034] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525012dc-adfc-7367-b256-94c577a3a2f7" [ 762.051034] env[62974]: _type = "Task" [ 762.051034] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.055757] env[62974]: INFO nova.compute.manager [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Unrescuing [ 762.055757] env[62974]: DEBUG oslo_concurrency.lockutils [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.055921] env[62974]: DEBUG oslo_concurrency.lockutils [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.056061] env[62974]: DEBUG nova.network.neutron [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.067052] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525012dc-adfc-7367-b256-94c577a3a2f7, 'name': SearchDatastore_Task, 'duration_secs': 0.011739} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.072658] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.072912] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 762.073166] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.073739] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.073739] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.073920] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654216, 'name': Rename_Task, 'duration_secs': 0.213819} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.075553] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4245be9-597e-4cd8-b396-0f7fb81e849b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.077811] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 762.078315] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51de3fba-3040-4b5f-ac9a-ee84a60a0c90 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.088023] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 762.088023] env[62974]: value = "task-2654218" [ 762.088023] env[62974]: _type = "Task" [ 762.088023] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.088023] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.088300] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 762.091519] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-142b3601-c127-46cb-9d1e-5f450cefb878 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.097034] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 762.097034] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52857b2a-c983-c798-d1f3-5863a7196c16" [ 762.097034] env[62974]: _type = "Task" [ 762.097034] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.100330] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654218, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.107808] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52857b2a-c983-c798-d1f3-5863a7196c16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.206892] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282903} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.209408] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 762.209562] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 762.209738] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 762.233977] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654214, 'name': Destroy_Task, 'duration_secs': 0.688904} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.234443] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Destroyed the VM [ 762.234878] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 762.235290] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1ffbfef2-0467-46fc-973c-5879bebcf4d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.242366] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 762.242366] env[62974]: value = "task-2654219" [ 762.242366] env[62974]: _type = "Task" [ 762.242366] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.251650] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654219, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.325359] env[62974]: DEBUG nova.network.neutron [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Updated VIF entry in instance network info cache for port 331712b7-2ae7-4199-a2b0-e7b880a332e8. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 762.325833] env[62974]: DEBUG nova.network.neutron [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Updating instance_info_cache with network_info: [{"id": "331712b7-2ae7-4199-a2b0-e7b880a332e8", "address": "fa:16:3e:f9:27:a0", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap331712b7-2a", "ovs_interfaceid": "331712b7-2ae7-4199-a2b0-e7b880a332e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.349386] env[62974]: DEBUG nova.network.neutron [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance_info_cache with network_info: [{"id": "3b60d221-2cab-4e30-8892-d139b511ccc1", "address": "fa:16:3e:80:cf:bd", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b60d221-2c", "ovs_interfaceid": "3b60d221-2cab-4e30-8892-d139b511ccc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.525035] env[62974]: DEBUG nova.compute.utils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 762.527440] env[62974]: DEBUG nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 
tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 762.527628] env[62974]: DEBUG nova.network.neutron [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 762.543480] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654215, 'name': CreateVM_Task, 'duration_secs': 0.528378} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.543660] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 762.545039] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.545220] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.545541] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 762.546015] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-689cf278-bcf7-411c-9fe5-f096e0da5ab6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.551382] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 762.551382] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5241185d-cc02-290c-578a-6c311a4e40a0" [ 762.551382] env[62974]: _type = "Task" [ 762.551382] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.559952] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5241185d-cc02-290c-578a-6c311a4e40a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.578956] env[62974]: DEBUG nova.policy [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d12189f3f8946eead2ca2fdacd9c8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0575ed5c28314e939bf91ea58759bf82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 762.597110] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654218, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.609052] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52857b2a-c983-c798-d1f3-5863a7196c16, 'name': SearchDatastore_Task, 'duration_secs': 0.01497} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.610255] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7bc43f0-b3ce-4167-b5da-52e674d1c679 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.615743] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 762.615743] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5231e250-0ca5-d3c0-d4a4-6aa8f1096014" [ 762.615743] env[62974]: _type = "Task" [ 762.615743] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.627131] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5231e250-0ca5-d3c0-d4a4-6aa8f1096014, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.755687] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654219, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.828612] env[62974]: DEBUG oslo_concurrency.lockutils [req-5303ee33-7a79-429e-b8c5-ab0beb504caf req-7907c618-a6b5-45d1-a8cb-435ad98375f8 service nova] Releasing lock "refresh_cache-3bcbcf35-294e-4d58-b002-cb84db4316d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.852459] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.890931] env[62974]: DEBUG nova.network.neutron [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Updating instance_info_cache with network_info: [{"id": "0576c111-5b07-4ceb-be4b-78e565bd0313", "address": "fa:16:3e:3f:7d:6e", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0576c111-5b", "ovs_interfaceid": "0576c111-5b07-4ceb-be4b-78e565bd0313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.995069] env[62974]: DEBUG nova.network.neutron [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Successfully created port: 618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.031074] env[62974]: DEBUG nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 763.066050] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5241185d-cc02-290c-578a-6c311a4e40a0, 'name': SearchDatastore_Task, 'duration_secs': 0.0122} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.066050] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.066050] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 763.066283] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.101813] env[62974]: DEBUG oslo_vmware.api [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654218, 'name': PowerOnVM_Task, 'duration_secs': 0.856181} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.101813] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 763.101813] env[62974]: INFO nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Took 9.31 seconds to spawn the instance on the hypervisor. [ 763.101813] env[62974]: DEBUG nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 763.103997] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4503d26d-b525-4f1e-9ba2-bcbcedfaca2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.130064] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5231e250-0ca5-d3c0-d4a4-6aa8f1096014, 'name': SearchDatastore_Task, 'duration_secs': 0.025858} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.131031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.131031] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c38cddae-95b3-4f4a-bf3a-5f0bdde548a9/c38cddae-95b3-4f4a-bf3a-5f0bdde548a9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.133620] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.133829] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.134075] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f25fb7ca-c26a-4650-a5f2-e176e085f497 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.136488] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a2bb4af-138b-4d85-aca8-88fffd24eb28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.142330] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 763.142330] env[62974]: value = "task-2654220" [ 763.142330] env[62974]: _type = "Task" [ 763.142330] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.146705] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.146888] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 763.150474] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa165292-df97-48de-affe-4ca421640a4f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.155978] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.159688] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 763.159688] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521d4197-d97c-f1a1-7758-af231b8db5bd" [ 763.159688] env[62974]: _type = "Task" [ 763.159688] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.171710] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521d4197-d97c-f1a1-7758-af231b8db5bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.253444] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 763.254207] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.254315] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 763.254442] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 
tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.254584] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 763.254733] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 763.254955] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 763.255132] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 763.255536] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 763.255715] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 763.255902] env[62974]: DEBUG nova.virt.hardware [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 763.256790] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fd77be-8731-44c0-af32-d34fc6764d67 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.267587] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654219, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.271257] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b88ae24-a165-4513-b32f-dbbebfb50c43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.288780] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:a2:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58ed2814-e050-4f6f-9847-7912e525e286', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.297178] env[62974]: DEBUG oslo.service.loopingcall [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.300817] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 763.301392] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25d3b384-d872-4444-b7c0-63eee568d28b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.322384] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.322384] env[62974]: value = "task-2654221" [ 763.322384] env[62974]: _type = "Task" [ 763.322384] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.329868] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654221, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.355882] env[62974]: DEBUG nova.compute.manager [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62974) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 763.356161] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.393708] env[62974]: DEBUG oslo_concurrency.lockutils [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-3426d512-d54e-4852-8eca-8ba9f5fef418" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.394485] env[62974]: DEBUG nova.objects.instance [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lazy-loading 'flavor' on Instance uuid 3426d512-d54e-4852-8eca-8ba9f5fef418 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.628522] env[62974]: INFO nova.compute.manager [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Took 45.57 seconds to build instance. [ 763.650614] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698b1162-caf0-40a7-99fa-d20e72cab977 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.664438] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058ceedf-1cbd-4080-8ea4-531b5915ed71 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.668053] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654220, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.710016] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a6f270-02f2-43b8-82a6-36d26086dec4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.712525] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521d4197-d97c-f1a1-7758-af231b8db5bd, 'name': SearchDatastore_Task, 'duration_secs': 0.009124} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.713866] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0692777-1a07-4b44-8967-3602989fd13c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.720045] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb84536c-a7d4-44ee-97e4-ef9391c62ada {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.725118] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 763.725118] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520d4d04-501c-aecd-1269-87de0aa6946d" [ 763.725118] env[62974]: _type = "Task" [ 763.725118] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.736310] env[62974]: DEBUG nova.compute.provider_tree [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.742945] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520d4d04-501c-aecd-1269-87de0aa6946d, 'name': SearchDatastore_Task, 'duration_secs': 0.017567} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.743210] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.743604] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3bcbcf35-294e-4d58-b002-cb84db4316d5/3bcbcf35-294e-4d58-b002-cb84db4316d5.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.743715] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13e250b5-a536-498f-b817-1c2062e2280e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.760148] env[62974]: DEBUG oslo_vmware.api [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654219, 'name': RemoveSnapshot_Task, 'duration_secs': 1.030943} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.760487] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 763.760487] env[62974]: value = "task-2654222" [ 763.760487] env[62974]: _type = "Task" [ 763.760487] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.760855] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 763.761153] env[62974]: INFO nova.compute.manager [None req-246e48ee-374a-4401-a178-ac7537bb37a5 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Took 18.26 seconds to snapshot the instance on the hypervisor. [ 763.772589] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654222, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.832923] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654221, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.902852] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf45b8cf-17d0-4ec2-af3f-e97d7f23fd5b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.926958] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 763.927380] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fccafd2-b2d5-4bde-96d9-047644fa40bd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.934404] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 763.934404] env[62974]: value = "task-2654223" [ 763.934404] env[62974]: _type = "Task" [ 763.934404] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.942468] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654223, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.046208] env[62974]: DEBUG nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 764.072062] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.072362] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.072537] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.072718] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.072860] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 764.073010] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.073219] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.073382] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.073539] env[62974]: DEBUG nova.virt.hardware [None 
req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.073695] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.073857] env[62974]: DEBUG nova.virt.hardware [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.074745] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc801f3-5b51-4004-82ea-932f45c670ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.083285] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778b986c-d974-4a6d-93e5-5eea84316ec4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.130521] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7d0b65-b547-4b38-839e-ad9801479ac4 tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.484s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.155019] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565004} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.155019] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c38cddae-95b3-4f4a-bf3a-5f0bdde548a9/c38cddae-95b3-4f4a-bf3a-5f0bdde548a9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.155019] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.155019] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52bd51fc-a700-4cf8-933c-a39a1a54d3e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.160313] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 764.160313] env[62974]: value = "task-2654224" [ 764.160313] env[62974]: _type = "Task" [ 764.160313] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.167732] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654224, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.239931] env[62974]: DEBUG nova.scheduler.client.report [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.261720] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "cf6e4f04-f5f4-46cb-884b-8014af903a10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.261989] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.262215] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "cf6e4f04-f5f4-46cb-884b-8014af903a10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.262458] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.262566] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.270378] env[62974]: INFO nova.compute.manager [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Terminating 
instance [ 764.286756] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654222, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.335369] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654221, 'name': CreateVM_Task, 'duration_secs': 0.629183} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.335476] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.336110] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.336280] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.336607] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 764.336919] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-191d675d-b937-402c-bb5d-70595f0bbbac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.341490] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 764.341490] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528e40f7-ea46-220b-1242-97482bde6c64" [ 764.341490] env[62974]: _type = "Task" [ 764.341490] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.349460] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528e40f7-ea46-220b-1242-97482bde6c64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.444169] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654223, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.506820] env[62974]: DEBUG nova.network.neutron [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Successfully updated port: 618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.528330] env[62974]: DEBUG nova.compute.manager [req-9ea6f066-d81b-439a-8a66-5cd35ef6ce4c req-3678107e-3f78-4f16-9cc7-a8cd7e51671d service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Received event network-vif-plugged-618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 764.528886] env[62974]: DEBUG oslo_concurrency.lockutils [req-9ea6f066-d81b-439a-8a66-5cd35ef6ce4c req-3678107e-3f78-4f16-9cc7-a8cd7e51671d service nova] Acquiring lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.528886] env[62974]: DEBUG oslo_concurrency.lockutils [req-9ea6f066-d81b-439a-8a66-5cd35ef6ce4c req-3678107e-3f78-4f16-9cc7-a8cd7e51671d service nova] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.529027] env[62974]: DEBUG oslo_concurrency.lockutils [req-9ea6f066-d81b-439a-8a66-5cd35ef6ce4c req-3678107e-3f78-4f16-9cc7-a8cd7e51671d service nova] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.529178] env[62974]: DEBUG nova.compute.manager [req-9ea6f066-d81b-439a-8a66-5cd35ef6ce4c req-3678107e-3f78-4f16-9cc7-a8cd7e51671d service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] No waiting events found dispatching network-vif-plugged-618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 764.529372] env[62974]: WARNING nova.compute.manager [req-9ea6f066-d81b-439a-8a66-5cd35ef6ce4c req-3678107e-3f78-4f16-9cc7-a8cd7e51671d service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Received unexpected event network-vif-plugged-618880a5-40af-4192-80d0-09a7533719d1 for instance with vm_state building and task_state spawning. [ 764.634030] env[62974]: DEBUG nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.669538] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.336981} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.670432] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.671211] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873433f1-8f9b-4b04-a8a4-624179267268 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.693513] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] c38cddae-95b3-4f4a-bf3a-5f0bdde548a9/c38cddae-95b3-4f4a-bf3a-5f0bdde548a9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.695147] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c03d5ff0-5014-400b-9325-d5a19fac847a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.715762] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 764.715762] env[62974]: value = "task-2654225" [ 764.715762] env[62974]: _type = "Task" [ 764.715762] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.724479] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654225, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.745835] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.747025] env[62974]: DEBUG nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 764.749011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.235s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.749346] env[62974]: DEBUG nova.objects.instance [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lazy-loading 'resources' on Instance uuid d941a678-1b67-4e0f-8806-e6682ef21774 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.774781] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654222, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.748368} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.775124] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3bcbcf35-294e-4d58-b002-cb84db4316d5/3bcbcf35-294e-4d58-b002-cb84db4316d5.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.775390] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.775715] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1ef64ee5-042c-4408-83c0-5cfdc96d8e10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.781905] env[62974]: DEBUG nova.compute.manager [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 764.782125] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.782849] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 764.782849] env[62974]: value = "task-2654226" [ 764.782849] env[62974]: _type = "Task" [ 764.782849] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.783605] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e7caf2-a363-440e-b581-4a6b6742480f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.794243] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.797092] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3684b9cc-9404-4e0b-9038-b15dbde4d075 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.798844] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.805403] env[62974]: DEBUG oslo_vmware.api [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 764.805403] env[62974]: value = "task-2654227" [ 764.805403] env[62974]: _type = "Task" [ 764.805403] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.817121] env[62974]: DEBUG oslo_vmware.api [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.852615] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528e40f7-ea46-220b-1242-97482bde6c64, 'name': SearchDatastore_Task, 'duration_secs': 0.058215} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.852940] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.853192] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.853425] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.853572] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.853749] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.854016] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93bd6e07-5de3-461f-b5b7-7d143f7aae48 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.862570] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.862786] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.863558] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a6fcf3e-e6f4-4362-9571-cc829c59f47b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.868813] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 764.868813] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c45285-3f6e-8afa-4a99-a56a76ece27a" [ 764.868813] env[62974]: _type = "Task" [ 764.868813] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.876806] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c45285-3f6e-8afa-4a99-a56a76ece27a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.944222] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654223, 'name': PowerOffVM_Task, 'duration_secs': 0.60071} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.944531] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.949892] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Reconfiguring VM instance instance-0000002e to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 764.950266] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53361a9f-73e3-4349-9e05-5e6f2048f06d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.968099] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 764.968099] env[62974]: value = "task-2654228" [ 764.968099] env[62974]: _type = "Task" [ 764.968099] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.976205] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654228, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.010965] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.011126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.011287] env[62974]: DEBUG nova.network.neutron [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.155646] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.226011] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654225, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.252404] env[62974]: DEBUG nova.compute.utils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 765.257022] env[62974]: DEBUG nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 765.257130] env[62974]: DEBUG nova.network.neutron [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 765.298300] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080608} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.299697] env[62974]: DEBUG nova.policy [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9375e76079f4f3e88387eac123780ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17bfed0a840e43b18856a7a33ec4bafc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 765.301071] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.303702] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e25ac0-9fed-42c5-b29f-48e6b06ca597 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.328555] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 3bcbcf35-294e-4d58-b002-cb84db4316d5/3bcbcf35-294e-4d58-b002-cb84db4316d5.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.335225] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4af81681-772b-4ed4-ac85-c511af91c9cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.349382] env[62974]: DEBUG oslo_vmware.api [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654227, 'name': PowerOffVM_Task, 'duration_secs': 0.265853} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.350105] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 765.350265] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 765.350855] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b600c51a-5bf1-44d0-adcd-9cdba664fef6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.356013] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 765.356013] env[62974]: value = "task-2654229" [ 765.356013] env[62974]: _type = "Task" [ 765.356013] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.363810] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654229, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.380651] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c45285-3f6e-8afa-4a99-a56a76ece27a, 'name': SearchDatastore_Task, 'duration_secs': 0.02507} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.381427] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c1f7b0f-5e00-48b3-90bb-bea19c0dbb97 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.387692] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 765.387692] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d6d757-c8ff-e460-a0c7-0eb4b78f2c11" [ 765.387692] env[62974]: _type = "Task" [ 765.387692] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.395262] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d6d757-c8ff-e460-a0c7-0eb4b78f2c11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.420864] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 765.420994] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 765.421201] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Deleting the datastore file [datastore1] cf6e4f04-f5f4-46cb-884b-8014af903a10 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 765.424055] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e327b756-731f-4d8d-a0ad-775444317fb4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.429794] env[62974]: DEBUG oslo_vmware.api [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for the task: (returnval){ [ 765.429794] env[62974]: value = "task-2654231" [ 765.429794] env[62974]: _type = "Task" [ 765.429794] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.437542] env[62974]: DEBUG oslo_vmware.api [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654231, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.478462] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654228, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.550605] env[62974]: DEBUG nova.network.neutron [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.704346] env[62974]: DEBUG nova.network.neutron [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Successfully created port: eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.718612] env[62974]: DEBUG nova.compute.manager [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 765.722652] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6dfe01-c75a-4b45-91c1-ad413e8654b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.734028] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654225, 'name': ReconfigVM_Task, 'duration_secs': 0.783732} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.736837] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Reconfigured VM instance instance-00000035 to attach disk [datastore1] c38cddae-95b3-4f4a-bf3a-5f0bdde548a9/c38cddae-95b3-4f4a-bf3a-5f0bdde548a9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.737325] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62974) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 765.738185] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-db373ed1-363d-419e-aae2-3879020aad01 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.746578] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 765.746578] env[62974]: value = "task-2654232" [ 765.746578] env[62974]: _type = "Task" [ 765.746578] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.758026] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654232, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.758471] env[62974]: DEBUG nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 765.773494] env[62974]: DEBUG nova.network.neutron [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updating instance_info_cache with network_info: [{"id": "618880a5-40af-4192-80d0-09a7533719d1", "address": "fa:16:3e:8f:95:9b", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap618880a5-40", "ovs_interfaceid": "618880a5-40af-4192-80d0-09a7533719d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.789560] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76e68d5-ad49-42eb-b59e-02b6832ef60b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.797375] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f63b2b-91c0-425e-8ec3-45fc500f4fba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.829349] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce96bf8-9f2c-4d93-9ed1-d165466e2d09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.836684] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af036351-9371-405e-963c-8dd7b325b964 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.850238] env[62974]: DEBUG nova.compute.provider_tree [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.865484] env[62974]: DEBUG 
oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.901158] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d6d757-c8ff-e460-a0c7-0eb4b78f2c11, 'name': SearchDatastore_Task, 'duration_secs': 0.015501} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.901518] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.903015] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 765.903015] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d09b5a52-8fd5-41d5-a007-4b83289b57bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.907926] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 765.907926] env[62974]: value = "task-2654233" [ 765.907926] env[62974]: _type = "Task" [ 765.907926] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.915715] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654233, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.940237] env[62974]: DEBUG oslo_vmware.api [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Task: {'id': task-2654231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.44116} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.940534] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.940856] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.941099] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.941388] env[62974]: INFO nova.compute.manager [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Took 1.16 seconds to destroy the instance on the hypervisor. [ 765.941659] env[62974]: DEBUG oslo.service.loopingcall [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.941863] env[62974]: DEBUG nova.compute.manager [-] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.941990] env[62974]: DEBUG nova.network.neutron [-] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.979883] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654228, 'name': ReconfigVM_Task, 'duration_secs': 0.580435} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.980158] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Reconfigured VM instance instance-0000002e to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 765.980158] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.980411] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b7eaf84-3b7c-4ed3-8004-eabbb8ddad50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.987269] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 765.987269] env[62974]: value = "task-2654234" [ 765.987269] env[62974]: _type = "Task" [ 765.987269] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.003370] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654234, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.239608] env[62974]: INFO nova.compute.manager [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] instance snapshotting [ 766.242707] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2fcec3-8c65-4db7-9821-d8f8e12ff684 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.268829] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af1a38a-e669-4e48-8517-edb86334c229 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.275157] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654232, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.050734} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.275907] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62974) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 766.276777] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79de31f-2bce-46f3-bf55-47afacbc67d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.281456] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Releasing lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.281770] env[62974]: DEBUG nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Instance network_info: |[{"id": "618880a5-40af-4192-80d0-09a7533719d1", "address": "fa:16:3e:8f:95:9b", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap618880a5-40", "ovs_interfaceid": "618880a5-40af-4192-80d0-09a7533719d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 766.284284] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:95:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d859f07-052d-4a69-bdf1-24261a6a6daa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '618880a5-40af-4192-80d0-09a7533719d1', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.293131] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Creating folder: Project 
(0575ed5c28314e939bf91ea58759bf82). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.293131] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-442b7113-e113-4735-8a60-4880bbb98b73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.317106] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] c38cddae-95b3-4f4a-bf3a-5f0bdde548a9/ephemeral_0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.321024] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0721f91-b89a-4c2e-af77-cab87ad4e846 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.334014] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Created folder: Project (0575ed5c28314e939bf91ea58759bf82) in parent group-v535199. [ 766.334207] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Creating folder: Instances. Parent ref: group-v535351. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.334828] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fd4fe38-87b7-4b4f-be91-c4bceb8371a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.340242] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 766.340242] env[62974]: value = "task-2654236" [ 766.340242] env[62974]: _type = "Task" [ 766.340242] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.345559] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Created folder: Instances in parent group-v535351. [ 766.345836] env[62974]: DEBUG oslo.service.loopingcall [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.346066] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 766.346295] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31533945-60a4-424b-9837-3b6f44063a4b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.365439] env[62974]: DEBUG nova.scheduler.client.report [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 766.368977] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654236, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.377917] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654229, 'name': ReconfigVM_Task, 'duration_secs': 0.666351} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.379368] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 3bcbcf35-294e-4d58-b002-cb84db4316d5/3bcbcf35-294e-4d58-b002-cb84db4316d5.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.379989] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.379989] env[62974]: value = "task-2654238" [ 766.379989] env[62974]: _type = "Task" [ 766.379989] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.380199] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90fce95c-e10e-45c9-9a75-42de26890aaa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.389955] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 766.389955] env[62974]: value = "task-2654239" [ 766.389955] env[62974]: _type = "Task" [ 766.389955] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.393274] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654238, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.402914] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654239, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.419112] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654233, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.498481] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654234, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.718828] env[62974]: DEBUG nova.network.neutron [-] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.747676] env[62974]: DEBUG nova.compute.manager [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Received event network-changed-618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 766.747879] env[62974]: DEBUG nova.compute.manager [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Refreshing instance network info cache due to event network-changed-618880a5-40af-4192-80d0-09a7533719d1. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 766.748317] env[62974]: DEBUG oslo_concurrency.lockutils [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] Acquiring lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.748473] env[62974]: DEBUG oslo_concurrency.lockutils [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] Acquired lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.748646] env[62974]: DEBUG nova.network.neutron [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Refreshing network info cache for port 618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.769670] env[62974]: DEBUG nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 766.793774] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 766.794038] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 766.794202] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 766.794387] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
766.794531] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 766.794676] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 766.794876] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 766.795067] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 766.795255] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 766.795420] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 766.795591] env[62974]: DEBUG nova.virt.hardware [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 766.796535] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 766.797341] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fe710c-4843-43a7-b508-2e69aa4b8590 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.799898] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-325784e6-9502-434d-b67e-6856fec989a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.808276] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27c0c62-555e-4a8f-92eb-7c18d5a0d0c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.814865] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 766.814865] env[62974]: value = "task-2654240" [ 766.814865] env[62974]: _type = "Task" [ 766.814865] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.832181] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654240, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.849936] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654236, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.870980] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.873486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.600s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.875088] env[62974]: INFO nova.compute.claims [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.893142] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654238, 'name': CreateVM_Task, 'duration_secs': 0.37449} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.893680] env[62974]: INFO nova.scheduler.client.report [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted allocations for instance d941a678-1b67-4e0f-8806-e6682ef21774 [ 766.894662] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 766.897508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.898946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.898946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 766.901893] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec90d68d-c674-4527-9387-cf82fe58f08b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.908637] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654239, 'name': Rename_Task, 'duration_secs': 0.167468} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.911548] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.911548] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 766.911548] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525031d9-2d20-8381-ab37-e4dcfb741327" [ 766.911548] env[62974]: _type = "Task" [ 766.911548] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.915035] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7fa8998-4697-4277-9c71-20c82d44fa8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.925175] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654233, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530185} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.931850] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 766.932080] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 766.932445] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 766.932445] env[62974]: value = "task-2654241" [ 766.932445] env[62974]: _type = "Task" [ 766.932445] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.933150] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525031d9-2d20-8381-ab37-e4dcfb741327, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.933369] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e1148cb-1e14-4ef1-b21b-3784b80b2dbc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.945315] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654241, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.945524] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 766.945524] env[62974]: value = "task-2654242" [ 766.945524] env[62974]: _type = "Task" [ 766.945524] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.954317] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.000316] env[62974]: DEBUG oslo_vmware.api [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654234, 'name': PowerOnVM_Task, 'duration_secs': 0.600058} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.000797] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.001103] env[62974]: DEBUG nova.compute.manager [None req-906df8ec-e445-43ec-8e5a-9aedacca753f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.002057] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f822e5-75b7-4be0-a5ec-9c1496a01753 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.221240] env[62974]: INFO nova.compute.manager [-] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Took 1.28 seconds to deallocate network for instance. [ 767.278591] env[62974]: DEBUG nova.network.neutron [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Successfully updated port: eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.321854] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654240, 'name': CreateSnapshot_Task, 'duration_secs': 0.473045} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.324379] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 767.325115] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa70f3d7-3e5d-41cc-9d78-bc04204e283f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.351259] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654236, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.406105] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7e70623e-ac0a-4d24-9350-46f8252a45d3 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "d941a678-1b67-4e0f-8806-e6682ef21774" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.795s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.427582] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525031d9-2d20-8381-ab37-e4dcfb741327, 'name': SearchDatastore_Task, 'duration_secs': 0.017291} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.427873] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.428116] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 767.428376] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.428527] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.428701] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 767.428944] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a27acc53-bf92-4e9f-93cd-0e814e7c3fd5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.441429] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 767.441583] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 767.445014] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40ebccab-0aa3-4687-8ce7-56ebccc65368 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.447104] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654241, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.450730] env[62974]: DEBUG nova.network.neutron [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updated VIF entry in instance network info cache for port 618880a5-40af-4192-80d0-09a7533719d1. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 767.451063] env[62974]: DEBUG nova.network.neutron [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updating instance_info_cache with network_info: [{"id": "618880a5-40af-4192-80d0-09a7533719d1", "address": "fa:16:3e:8f:95:9b", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap618880a5-40", "ovs_interfaceid": "618880a5-40af-4192-80d0-09a7533719d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.453253] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 767.453253] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521553c5-a3c3-a108-c5b7-6f49ddb90d75" [ 767.453253] env[62974]: _type = "Task" [ 767.453253] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.458743] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066184} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.459322] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.460465] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1a121c-a6ea-474a-9a1e-4c60d6719edf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.465611] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521553c5-a3c3-a108-c5b7-6f49ddb90d75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.484569] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.485023] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d397eff0-6944-4ec0-882f-4415e9399eb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.504216] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 767.504216] env[62974]: value = "task-2654243" [ 767.504216] env[62974]: _type = "Task" [ 767.504216] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.513200] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654243, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.727935] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.782059] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.782327] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquired lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.784666] env[62974]: DEBUG nova.network.neutron [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.846290] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 767.846738] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-de9ecc36-2d73-4c19-be94-8f14296dde1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.859018] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654236, 'name': ReconfigVM_Task, 'duration_secs': 1.47606} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.860729] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Reconfigured VM instance instance-00000035 to attach disk [datastore1] c38cddae-95b3-4f4a-bf3a-5f0bdde548a9/ephemeral_0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.861865] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 767.861865] env[62974]: value = "task-2654244" [ 767.861865] env[62974]: _type = "Task" [ 767.861865] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.861865] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b277f113-3920-40c2-a042-7025a079b31e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.871371] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654244, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.872615] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 767.872615] env[62974]: value = "task-2654245" [ 767.872615] env[62974]: _type = "Task" [ 767.872615] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.880313] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654245, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.945948] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654241, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.954920] env[62974]: DEBUG oslo_concurrency.lockutils [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] Releasing lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.955111] env[62974]: DEBUG nova.compute.manager [req-316f7247-0ef6-496f-a2ab-4160a72493e5 req-404283c6-8eda-4169-bcae-36933dd8afd3 service nova] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Received event network-vif-deleted-975d472e-a9c2-416f-9c30-6d3563f96445 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 767.969894] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521553c5-a3c3-a108-c5b7-6f49ddb90d75, 'name': SearchDatastore_Task, 'duration_secs': 0.052591} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.970791] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb6f8514-48d2-4de4-8214-e0be3fab2857 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.981184] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 767.981184] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f6a7f9-f43c-a612-e0e8-f33626769685" [ 767.981184] env[62974]: _type = "Task" [ 767.981184] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.990118] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f6a7f9-f43c-a612-e0e8-f33626769685, 'name': SearchDatastore_Task, 'duration_secs': 0.009687} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.990422] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.990708] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6928b412-e8cb-42fb-bc47-dc8498f12ad1/6928b412-e8cb-42fb-bc47-dc8498f12ad1.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 767.991017] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2ebc186-da77-44e3-8fee-d5b67c97a63b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.000898] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 768.000898] env[62974]: value = "task-2654246" [ 768.000898] env[62974]: _type = "Task" [ 768.000898] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.020580] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654246, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.028712] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654243, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.318861] env[62974]: DEBUG nova.network.neutron [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.384835] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654244, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.390908] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654245, 'name': Rename_Task, 'duration_secs': 0.170161} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.391094] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.391357] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd51f56f-d370-44cd-a7ec-de5b3d7aee15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.398400] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 768.398400] env[62974]: value = "task-2654247" [ 768.398400] env[62974]: _type = "Task" [ 768.398400] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.412889] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.445724] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e3c352-bcc6-466a-b5a3-394600cf37d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.452408] env[62974]: DEBUG oslo_vmware.api [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654241, 'name': PowerOnVM_Task, 'duration_secs': 1.322204} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.453254] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.453495] env[62974]: INFO nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Took 9.24 seconds to spawn the instance on the hypervisor. 
[ 768.453690] env[62974]: DEBUG nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.454531] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1ee148-ed39-4fc1-8710-093379925237 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.461262] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49cb1c9-3546-4b04-90a2-becc2ad2fef5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.500933] env[62974]: DEBUG nova.network.neutron [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.502241] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35390e8-7aca-422b-9d27-42bf63d6a9a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.521124] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64df641-ed37-47e2-83c8-d5d1414b974a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.525050] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654246, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507793} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.528174] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6928b412-e8cb-42fb-bc47-dc8498f12ad1/6928b412-e8cb-42fb-bc47-dc8498f12ad1.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.528409] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.528892] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654243, 'name': ReconfigVM_Task, 'duration_secs': 0.578589} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.529390] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42ddb3a4-5667-4c71-9095-f60338825980 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.531085] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead/65615fd7-c219-4c19-8ecf-11336b616ead.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.539121] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab74954c-944d-4f2a-8a93-7dcf27116715 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.540912] env[62974]: DEBUG nova.compute.provider_tree [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.546181] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 768.546181] env[62974]: value = "task-2654249" [ 768.546181] env[62974]: _type = "Task" [ 768.546181] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.547299] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 768.547299] env[62974]: value = "task-2654248" [ 768.547299] env[62974]: _type = "Task" [ 768.547299] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.559378] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654249, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.562444] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654248, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.821305] env[62974]: DEBUG nova.compute.manager [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Received event network-vif-plugged-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 768.821572] env[62974]: DEBUG oslo_concurrency.lockutils [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] Acquiring lock "12c769fb-8c9e-4089-9563-232cfad89b21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.821804] env[62974]: DEBUG oslo_concurrency.lockutils [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] Lock "12c769fb-8c9e-4089-9563-232cfad89b21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.822446] env[62974]: DEBUG oslo_concurrency.lockutils [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] Lock "12c769fb-8c9e-4089-9563-232cfad89b21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.822446] env[62974]: DEBUG nova.compute.manager [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] No waiting events found dispatching network-vif-plugged-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.822576] env[62974]: WARNING nova.compute.manager [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Received unexpected event network-vif-plugged-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 for instance with vm_state 
building and task_state spawning. [ 768.822738] env[62974]: DEBUG nova.compute.manager [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Received event network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 768.822862] env[62974]: DEBUG nova.compute.manager [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing instance network info cache due to event network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 768.823088] env[62974]: DEBUG oslo_concurrency.lockutils [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] Acquiring lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.874939] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654244, 'name': CloneVM_Task} progress is 95%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.911621] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654247, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.981017] env[62974]: INFO nova.compute.manager [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Took 46.13 seconds to build instance. 
[ 769.011418] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Releasing lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.012024] env[62974]: DEBUG nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Instance network_info: |[{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 769.012179] env[62974]: DEBUG oslo_concurrency.lockutils [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] Acquired lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.012388] env[62974]: DEBUG nova.network.neutron [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.014123] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:ab:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaf8ac28-c7f5-4462-9003-c34a22eb0f00', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.023357] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Creating folder: 
Project (17bfed0a840e43b18856a7a33ec4bafc). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.024300] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77df6ab0-a3a1-4053-9d97-17b839b35f87 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.036605] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Created folder: Project (17bfed0a840e43b18856a7a33ec4bafc) in parent group-v535199. [ 769.036831] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Creating folder: Instances. Parent ref: group-v535356. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.037130] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-366b455c-9a62-406e-9c17-17dbb37387ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.044696] env[62974]: DEBUG nova.scheduler.client.report [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.049935] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Created folder: Instances in parent group-v535356. [ 769.050782] env[62974]: DEBUG oslo.service.loopingcall [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.057416] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.060519] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-522b45a1-16de-4c43-a9cf-2934ce00ef88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.083468] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654249, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069665} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.087695] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.087974] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.087974] env[62974]: value = "task-2654252" [ 769.087974] env[62974]: _type = "Task" [ 769.087974] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.088485] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654248, 'name': Rename_Task, 'duration_secs': 0.152872} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.089214] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5685d63-6141-41c1-9416-debb8b59f7fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.092218] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 769.095013] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef990a13-902d-4691-a0e4-737cc9ca0193 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.116662] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 6928b412-e8cb-42fb-bc47-dc8498f12ad1/6928b412-e8cb-42fb-bc47-dc8498f12ad1.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.121692] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-beabfba6-d490-4db3-b377-c11a23effc88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.138053] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654252, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.138446] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 769.138446] env[62974]: value = "task-2654253" [ 769.138446] env[62974]: _type = "Task" [ 769.138446] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.143756] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 769.143756] env[62974]: value = "task-2654254" [ 769.143756] env[62974]: _type = "Task" [ 769.143756] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.150612] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654253, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.155589] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654254, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.375367] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654244, 'name': CloneVM_Task, 'duration_secs': 1.119411} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.375636] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Created linked-clone VM from snapshot [ 769.376399] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2bd2bd6-609c-45ce-8eef-9f9b93cb4ac5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.384637] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Uploading image d1279753-d482-4ec2-9496-86a886b386a5 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 769.408110] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 769.408110] env[62974]: value = "vm-535355" [ 769.408110] env[62974]: _type = "VirtualMachine" [ 769.408110] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 769.408444] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-eac89e7e-30be-4a5d-926c-12ec2d943326 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.417200] env[62974]: DEBUG oslo_vmware.api [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654247, 'name': PowerOnVM_Task, 'duration_secs': 0.657884} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.417200] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 769.417200] env[62974]: INFO nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Took 12.36 seconds to spawn the instance on the hypervisor. [ 769.417200] env[62974]: DEBUG nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.417683] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lease: (returnval){ [ 769.417683] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52131bae-553b-c00e-8a39-f0318cb1bea6" [ 769.417683] env[62974]: _type = "HttpNfcLease" [ 769.417683] env[62974]: } obtained for exporting VM: (result){ [ 769.417683] env[62974]: value = "vm-535355" [ 769.417683] env[62974]: _type = "VirtualMachine" [ 769.417683] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 769.417976] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the lease: (returnval){ [ 769.417976] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52131bae-553b-c00e-8a39-f0318cb1bea6" [ 769.417976] env[62974]: _type = "HttpNfcLease" [ 769.417976] env[62974]: } to be ready. 
{{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 769.418427] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e1921e-1faf-4d20-8b50-000b4d1ef170 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.434686] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 769.434686] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52131bae-553b-c00e-8a39-f0318cb1bea6" [ 769.434686] env[62974]: _type = "HttpNfcLease" [ 769.434686] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 769.483286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5dccf7a7-b09b-441f-9880-9f8d59925d69 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.829s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.552464] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.553030] env[62974]: DEBUG nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 769.555828] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.381s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.557513] env[62974]: INFO nova.compute.claims [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.607281] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654252, 'name': CreateVM_Task, 'duration_secs': 0.362253} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.607464] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 769.611792] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.611981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.612359] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 769.613506] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15b9ddd1-993f-462e-a7a2-9848c2d4a544 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.619962] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 769.619962] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52761211-b7c4-095c-9fba-8e8c2973e8ad" [ 769.619962] env[62974]: _type = "Task" [ 769.619962] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.633364] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52761211-b7c4-095c-9fba-8e8c2973e8ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.647429] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654253, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.656595] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654254, 'name': ReconfigVM_Task, 'duration_secs': 0.334213} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.656852] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 6928b412-e8cb-42fb-bc47-dc8498f12ad1/6928b412-e8cb-42fb-bc47-dc8498f12ad1.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.657462] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-561a2a74-c697-4148-93ee-552941347170 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.663147] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 769.663147] env[62974]: value = "task-2654256" [ 769.663147] env[62974]: _type = "Task" [ 769.663147] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.671943] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654256, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.791210] env[62974]: DEBUG nova.network.neutron [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updated VIF entry in instance network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 769.791585] env[62974]: DEBUG nova.network.neutron [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.932273] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 769.932273] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52131bae-553b-c00e-8a39-f0318cb1bea6" [ 769.932273] env[62974]: _type = "HttpNfcLease" [ 769.932273] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 769.932697] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 769.932697] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52131bae-553b-c00e-8a39-f0318cb1bea6" [ 769.932697] env[62974]: _type = "HttpNfcLease" [ 769.932697] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 769.933345] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e64fab-3105-4abe-b97d-5a9e5a6afcef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.945101] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521a0bbd-68a9-1acf-729a-8682e17570e1/disk-0.vmdk from lease info. 
{{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 769.945359] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521a0bbd-68a9-1acf-729a-8682e17570e1/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 769.946846] env[62974]: INFO nova.compute.manager [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Took 47.57 seconds to build instance. [ 770.006031] env[62974]: DEBUG nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 770.041788] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d07b36c0-2f7c-4876-b4cb-9597af561c50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.064922] env[62974]: DEBUG nova.compute.utils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.069411] env[62974]: DEBUG nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 770.069708] env[62974]: DEBUG nova.network.neutron [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 770.091251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.091480] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.118780] env[62974]: DEBUG nova.policy [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85705a53f9314b08aed10199854f0d2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc2dc33e40e549d1a025e4b883c4dfb1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 770.131576] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52761211-b7c4-095c-9fba-8e8c2973e8ad, 'name': SearchDatastore_Task, 'duration_secs': 0.037369} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.131576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.131576] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.131576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.131959] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.131959] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.131959] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2dec11bb-cb74-4e41-9cd4-5797e4c827fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.141012] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.141321] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 770.145653] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ece0d03-aaac-42a4-a815-5a7a483fb495 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.153928] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654253, 'name': PowerOnVM_Task, 'duration_secs': 0.98821} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.154092] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 770.154092] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c475e-47dd-c4df-bed1-7aff1dd98db9" [ 770.154092] env[62974]: _type = "Task" [ 770.154092] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.154416] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.154508] env[62974]: DEBUG nova.compute.manager [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.155304] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3650396c-702b-48ad-bb94-912aa260763d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.172314] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c475e-47dd-c4df-bed1-7aff1dd98db9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.177679] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654256, 'name': Rename_Task, 'duration_secs': 0.14412} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.178500] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.178995] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1199135-c564-48c8-89f9-8f78194c272e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.186927] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 770.186927] env[62974]: value = "task-2654257" [ 770.186927] env[62974]: _type = "Task" [ 770.186927] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.198890] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654257, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.294520] env[62974]: DEBUG oslo_concurrency.lockutils [req-dde04058-2bd1-4abd-a938-d57ab370ec7b req-b2f44aad-c678-4984-8b88-b767f1297d03 service nova] Releasing lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.419782] env[62974]: DEBUG nova.network.neutron [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Successfully created port: 9cfe7952-9fc7-4153-bdf7-356ebd06114e {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 770.450935] env[62974]: DEBUG oslo_concurrency.lockutils [None req-da76f5fb-ac13-4dd0-9f34-3498d34e671c tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.214s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.535548] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.570009] env[62974]: DEBUG nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 770.595290] env[62974]: DEBUG nova.compute.utils [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.667036] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c475e-47dd-c4df-bed1-7aff1dd98db9, 'name': SearchDatastore_Task, 'duration_secs': 0.014722} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.673308] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95421235-18fe-4b1c-8823-6819daa108c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.676669] env[62974]: INFO nova.compute.manager [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] bringing vm to original state: 'stopped' [ 770.683700] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 770.683700] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5241d345-e29a-89e6-955a-ce2b8bcfefde" [ 770.683700] env[62974]: _type = "Task" [ 770.683700] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.701495] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5241d345-e29a-89e6-955a-ce2b8bcfefde, 'name': SearchDatastore_Task, 'duration_secs': 0.01515} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.705199] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.705283] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 12c769fb-8c9e-4089-9563-232cfad89b21/12c769fb-8c9e-4089-9563-232cfad89b21.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 770.705576] env[62974]: DEBUG oslo_vmware.api [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654257, 'name': PowerOnVM_Task, 'duration_secs': 0.49004} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.706934] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9164f901-07c9-4782-b6b7-85aff20c3b62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.709158] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.709390] env[62974]: INFO nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Took 6.66 seconds to spawn the instance on the hypervisor. [ 770.709577] env[62974]: DEBUG nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.713801] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d951a815-cc37-418a-81fc-a25ef896d613 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.725916] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 770.725916] env[62974]: value = "task-2654258" [ 770.725916] env[62974]: _type = "Task" [ 770.725916] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.734697] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654258, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.832108] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.832300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.955400] env[62974]: DEBUG nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.102539] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.240161] env[62974]: INFO nova.compute.manager [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Took 44.75 seconds to build instance. [ 771.250694] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654258, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495585} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.250694] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 12c769fb-8c9e-4089-9563-232cfad89b21/12c769fb-8c9e-4089-9563-232cfad89b21.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 771.250694] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 771.250694] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-410ca9ff-11ed-45ab-b808-ca80a2e30df0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.261040] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 771.261040] env[62974]: value = "task-2654259" [ 771.261040] env[62974]: _type = "Task" [ 771.261040] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.269797] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654259, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.330822] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c603425b-3c12-4736-b30d-83f5a5bc368b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.345171] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7099c1b-e17b-4a8f-a7d7-538ecc403e0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.378218] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb946d3-2372-425e-b26f-409288565aae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.387010] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f5a35e-b1aa-4465-a7b7-280bd588b8c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.402868] env[62974]: DEBUG nova.compute.provider_tree [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.477914] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.582967] env[62974]: DEBUG nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 771.620023] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 771.621533] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.621533] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 771.621533] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.621533] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 771.621533] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 771.621846] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 771.622513] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 771.622967] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f 
tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 771.625159] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 771.625159] env[62974]: DEBUG nova.virt.hardware [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 771.625159] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463a3368-bb60-4990-b278-825cc4ee0c46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.634799] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffb1ae5-7fe3-40fd-a074-b367ba5e8016 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.684724] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "65615fd7-c219-4c19-8ecf-11336b616ead" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.685357] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.686956] env[62974]: DEBUG nova.compute.manager [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 771.688317] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9580aa43-dfaf-4ab3-bd1a-b474474fc027 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.701463] env[62974]: DEBUG nova.compute.manager [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 771.742461] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b02141eb-7626-4546-ae26-c1be2c50bdf2 
tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.862s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.773559] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654259, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06876} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.773879] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 771.774692] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72275da-f261-440a-8481-7a851818b2a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.801221] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 12c769fb-8c9e-4089-9563-232cfad89b21/12c769fb-8c9e-4089-9563-232cfad89b21.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 771.802073] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98a997cf-1b05-4b14-9e4e-c023a72c5cbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.824281] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 771.824281] env[62974]: value = "task-2654260" [ 771.824281] env[62974]: _type = "Task" [ 771.824281] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.832934] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654260, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.906983] env[62974]: DEBUG nova.scheduler.client.report [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 772.173572] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.175127] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.175468] env[62974]: INFO nova.compute.manager [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Attaching volume 0e10d030-eb6c-4b13-954b-b185529be495 to /dev/sdb [ 772.207159] env[62974]: DEBUG nova.network.neutron [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Successfully updated port: 9cfe7952-9fc7-4153-bdf7-356ebd06114e {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 772.211136] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 772.211803] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39028b14-fe43-4b36-99e4-91b0aac9fd96 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.222752] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 772.222752] env[62974]: value = "task-2654261" [ 772.222752] env[62974]: _type = "Task" [ 772.222752] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.229669] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4c05b3-59b2-492f-b5d7-c0b7234d51d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.245086] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.245086] env[62974]: DEBUG nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 772.248685] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56b5875-4014-4b5f-886d-a4a67f6213dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.263333] env[62974]: DEBUG nova.virt.block_device [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Updating existing volume attachment record: d159cc2a-5304-495f-8551-3afdf423d841 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 772.335083] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654260, 'name': ReconfigVM_Task, 'duration_secs': 0.487436} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.335328] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 12c769fb-8c9e-4089-9563-232cfad89b21/12c769fb-8c9e-4089-9563-232cfad89b21.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 772.335941] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c579347-41a2-45d5-a182-0a71ff7de4ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.341383] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 772.341383] env[62974]: value = "task-2654262" [ 772.341383] env[62974]: _type = "Task" [ 772.341383] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.350426] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654262, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.358559] env[62974]: DEBUG nova.compute.manager [req-ede633fd-a3dd-4c10-9c05-8a1dcd7c08c8 req-c81dfcd7-5202-4d47-81cf-c287b866c6e1 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Received event network-vif-plugged-9cfe7952-9fc7-4153-bdf7-356ebd06114e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 772.358726] env[62974]: DEBUG oslo_concurrency.lockutils [req-ede633fd-a3dd-4c10-9c05-8a1dcd7c08c8 req-c81dfcd7-5202-4d47-81cf-c287b866c6e1 service nova] Acquiring lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.358988] env[62974]: DEBUG oslo_concurrency.lockutils [req-ede633fd-a3dd-4c10-9c05-8a1dcd7c08c8 req-c81dfcd7-5202-4d47-81cf-c287b866c6e1 service nova] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.359501] env[62974]: DEBUG oslo_concurrency.lockutils [req-ede633fd-a3dd-4c10-9c05-8a1dcd7c08c8 req-c81dfcd7-5202-4d47-81cf-c287b866c6e1 service nova] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.360010] env[62974]: DEBUG nova.compute.manager [req-ede633fd-a3dd-4c10-9c05-8a1dcd7c08c8 req-c81dfcd7-5202-4d47-81cf-c287b866c6e1 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] No waiting events found dispatching network-vif-plugged-9cfe7952-9fc7-4153-bdf7-356ebd06114e {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 772.360010] env[62974]: WARNING nova.compute.manager [req-ede633fd-a3dd-4c10-9c05-8a1dcd7c08c8 req-c81dfcd7-5202-4d47-81cf-c287b866c6e1 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Received unexpected event network-vif-plugged-9cfe7952-9fc7-4153-bdf7-356ebd06114e for instance with vm_state building and task_state spawning. [ 772.412721] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.857s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.413368] env[62974]: DEBUG nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 772.416522] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.878s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.418400] env[62974]: INFO nova.compute.claims [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.509567] env[62974]: DEBUG nova.compute.manager [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Received event network-changed-618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 772.509567] env[62974]: DEBUG nova.compute.manager [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Refreshing instance network info cache due to event network-changed-618880a5-40af-4192-80d0-09a7533719d1. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 772.509567] env[62974]: DEBUG oslo_concurrency.lockutils [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] Acquiring lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.510607] env[62974]: DEBUG oslo_concurrency.lockutils [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] Acquired lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.510607] env[62974]: DEBUG nova.network.neutron [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Refreshing network info cache for port 618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.710833] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.712098] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.712471] env[62974]: DEBUG nova.network.neutron [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 
6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.737413] env[62974]: DEBUG oslo_vmware.api [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654261, 'name': PowerOffVM_Task, 'duration_secs': 0.266703} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.737761] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 772.737985] env[62974]: DEBUG nova.compute.manager [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.739760] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d667a54-c740-4205-bd20-54c639947a61 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.827375] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.857579] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654262, 'name': Rename_Task, 'duration_secs': 0.182476} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.858169] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 772.858527] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82a6231d-b368-4089-9530-0ac1685532c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.866091] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 772.866091] env[62974]: value = "task-2654266" [ 772.866091] env[62974]: _type = "Task" [ 772.866091] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.874495] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654266, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.922671] env[62974]: DEBUG nova.compute.utils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.924163] env[62974]: DEBUG nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.924358] env[62974]: DEBUG nova.network.neutron [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.976915] env[62974]: DEBUG nova.policy [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84861fd0e88640529eb573045514dff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39e59f58f7c24529bfce4bcc18cc7925', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 773.271482] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.586s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.284038] env[62974]: DEBUG nova.network.neutron [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.378267] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654266, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.428866] env[62974]: DEBUG nova.network.neutron [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updated VIF entry in instance network info cache for port 618880a5-40af-4192-80d0-09a7533719d1. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.429439] env[62974]: DEBUG nova.network.neutron [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updating instance_info_cache with network_info: [{"id": "618880a5-40af-4192-80d0-09a7533719d1", "address": "fa:16:3e:8f:95:9b", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap618880a5-40", "ovs_interfaceid": "618880a5-40af-4192-80d0-09a7533719d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.433506] env[62974]: DEBUG nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 773.550088] env[62974]: DEBUG nova.network.neutron [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Successfully created port: f8424609-cf9e-4474-a78b-3d28dbdd7cb0 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.578070] env[62974]: DEBUG nova.network.neutron [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Updating instance_info_cache with network_info: [{"id": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "address": "fa:16:3e:21:9e:8b", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfe7952-9f", "ovs_interfaceid": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.785767] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.876564] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654266, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.943148] env[62974]: DEBUG oslo_concurrency.lockutils [req-ab0c2ad0-0c0c-4a2f-b475-526d42a5ba70 req-82b1de6d-997d-4b72-9767-5658ce22aa34 service nova] Releasing lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.063069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21e9599-09b8-4e36-9452-24e941daecad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.071610] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993972fc-593b-4ba2-b12d-089e89de6248 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.103541] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.103936] env[62974]: DEBUG nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Instance network_info: |[{"id": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "address": "fa:16:3e:21:9e:8b", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfe7952-9f", "ovs_interfaceid": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 774.104587] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:9e:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cfe7952-9fc7-4153-bdf7-356ebd06114e', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 774.112209] 
env[62974]: DEBUG oslo.service.loopingcall [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.112947] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bf50ae-b4eb-4637-bdfd-a32162f49f06 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.115686] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 774.115916] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dedac2b8-bffa-4ef0-856d-c50ab07f6cde {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.140225] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2670f1-b2a0-42a6-b415-9be50b0f48d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.144577] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 774.144577] env[62974]: value = "task-2654267" [ 774.144577] env[62974]: _type = "Task" [ 774.144577] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.157042] env[62974]: DEBUG nova.compute.provider_tree [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.163413] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654267, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.383065] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654266, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.463819] env[62974]: DEBUG nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 774.503742] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 774.504035] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.504157] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.504340] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.504484] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.504657] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 774.505018] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 774.505191] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 774.505451] env[62974]: DEBUG 
nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 774.505686] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 774.505925] env[62974]: DEBUG nova.virt.hardware [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 774.506964] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6740bb9-8a13-4b77-aac8-d08078bda159 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.518009] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b30a7a8-ee1a-4ef5-80aa-f9a8d5b3782f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.608963] env[62974]: DEBUG nova.compute.manager [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Received event network-changed-1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.609376] env[62974]: DEBUG nova.compute.manager [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Refreshing instance network info cache due to event network-changed-1a9b17b6-73d0-4a26-aeb4-00390730c3b0. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 774.609611] env[62974]: DEBUG oslo_concurrency.lockutils [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] Acquiring lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.609757] env[62974]: DEBUG oslo_concurrency.lockutils [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] Acquired lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.609915] env[62974]: DEBUG nova.network.neutron [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Refreshing network info cache for port 1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.656130] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654267, 'name': CreateVM_Task, 'duration_secs': 0.480249} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.656317] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.657205] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.657436] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.657766] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 774.658030] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adbed287-1bb5-4022-bd0e-a2bb06cd852e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.660853] env[62974]: DEBUG nova.scheduler.client.report [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.670164] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 774.670164] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5277b36b-5027-d5f2-2bf0-1058890b95e3" [ 774.670164] env[62974]: _type = "Task" [ 774.670164] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.676234] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5277b36b-5027-d5f2-2bf0-1058890b95e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.685074] env[62974]: DEBUG nova.compute.manager [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Received event network-changed-618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.685074] env[62974]: DEBUG nova.compute.manager [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Refreshing instance network info cache due to event network-changed-618880a5-40af-4192-80d0-09a7533719d1. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 774.685626] env[62974]: DEBUG oslo_concurrency.lockutils [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] Acquiring lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.685626] env[62974]: DEBUG oslo_concurrency.lockutils [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] Acquired lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.685761] env[62974]: DEBUG nova.network.neutron [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Refreshing network info cache for port 618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.881701] env[62974]: DEBUG oslo_vmware.api [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654266, 'name': PowerOnVM_Task, 'duration_secs': 1.631788} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.881978] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 774.882204] env[62974]: INFO nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Took 8.11 seconds to spawn the instance on the hypervisor. 
[ 774.882386] env[62974]: DEBUG nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 774.883197] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71467885-1332-444d-972a-2b3853dfe9c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.076769] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "65615fd7-c219-4c19-8ecf-11336b616ead" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.077049] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.077267] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "65615fd7-c219-4c19-8ecf-11336b616ead-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.077443] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.078294] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.083189] env[62974]: INFO nova.compute.manager [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Terminating instance [ 775.088291] env[62974]: DEBUG nova.network.neutron [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Successfully updated port: f8424609-cf9e-4474-a78b-3d28dbdd7cb0 {{(pid=62974) 
_update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.166227] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.166908] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 775.169798] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.426s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.170488] env[62974]: DEBUG nova.objects.instance [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lazy-loading 'resources' on Instance uuid bcacc508-b910-4144-bf0b-454b0928ca71 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 775.184124] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5277b36b-5027-d5f2-2bf0-1058890b95e3, 'name': SearchDatastore_Task, 'duration_secs': 0.01939} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.184707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.184910] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 775.185279] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.185329] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.185476] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 775.186023] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce3c1e54-373e-4c85-ac7b-76569980d16f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.197708] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 775.197708] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 775.198388] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1b39f0c-ed1e-4306-aaed-7d815aeb34cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.205471] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 775.205471] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520561e2-df60-f78d-80aa-5c395149876b" [ 775.205471] env[62974]: _type = "Task" [ 775.205471] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.214160] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520561e2-df60-f78d-80aa-5c395149876b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.405323] env[62974]: INFO nova.compute.manager [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Took 44.68 seconds to build instance. [ 775.454857] env[62974]: DEBUG nova.network.neutron [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Updated VIF entry in instance network info cache for port 1a9b17b6-73d0-4a26-aeb4-00390730c3b0. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.455242] env[62974]: DEBUG nova.network.neutron [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Updating instance_info_cache with network_info: [{"id": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "address": "fa:16:3e:86:24:d9", "network": {"id": "eb3f40d6-4045-4fb6-8d0a-5bbfef95dcd1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1079738456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a095f717f7d4c1e81311a0810eed958", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a9b17b6-73", "ovs_interfaceid": "1a9b17b6-73d0-4a26-aeb4-00390730c3b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.493405] env[62974]: DEBUG nova.network.neutron [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updated VIF entry in instance network info cache for port 618880a5-40af-4192-80d0-09a7533719d1. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.493759] env[62974]: DEBUG nova.network.neutron [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updating instance_info_cache with network_info: [{"id": "618880a5-40af-4192-80d0-09a7533719d1", "address": "fa:16:3e:8f:95:9b", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap618880a5-40", "ovs_interfaceid": "618880a5-40af-4192-80d0-09a7533719d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.591023] env[62974]: DEBUG nova.compute.manager [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 775.591023] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.591023] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0634d3e8-c7ed-49c5-bcce-1b2dd647ae0b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.600268] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.600268] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.600268] env[62974]: DEBUG nova.network.neutron [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.606012] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.606716] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7013ff0-3392-4b85-a8f6-50645f2c647b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.676310] env[62974]: DEBUG nova.compute.utils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 775.679495] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 775.683029] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.686338] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.686536] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.686723] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleting the datastore file [datastore1] 65615fd7-c219-4c19-8ecf-11336b616ead {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.687293] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db574bb6-0234-4431-b735-23f05ad06598 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.694249] env[62974]: DEBUG oslo_vmware.api [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 775.694249] env[62974]: value = "task-2654270" [ 775.694249] env[62974]: _type = "Task" [ 775.694249] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.703753] env[62974]: DEBUG oslo_vmware.api [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.718566] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520561e2-df60-f78d-80aa-5c395149876b, 'name': SearchDatastore_Task, 'duration_secs': 0.014522} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.719417] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74ec4675-fa45-41c7-bed8-ab673ca352a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.724831] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 775.724831] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d5b3e-46b8-b585-2170-3a39f199613b" [ 775.724831] env[62974]: _type = "Task" [ 775.724831] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.729626] env[62974]: DEBUG nova.policy [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b18638d1ce6f4d2bb1f1e8117deba2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e728dfba54cd4779aad4879fb213a81b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.751772] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525d5b3e-46b8-b585-2170-3a39f199613b, 'name': SearchDatastore_Task, 'duration_secs': 0.013203} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.752113] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.752674] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b/6cee3cf6-2105-40f7-b7f2-5bd38a01a08b.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.752772] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39e1007c-34a7-42d8-bb56-c425d6cba929 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.760703] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 775.760703] env[62974]: value = "task-2654271" [ 775.760703] env[62974]: _type = "Task" [ 775.760703] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.770421] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654271, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.910446] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a794cc36-0525-47f0-8cf6-0e356d6b0ba8 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "12c769fb-8c9e-4089-9563-232cfad89b21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.249s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.962075] env[62974]: DEBUG oslo_concurrency.lockutils [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] Releasing lock "refresh_cache-c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.962075] env[62974]: DEBUG nova.compute.manager [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Received event network-changed-9cfe7952-9fc7-4153-bdf7-356ebd06114e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 775.962075] env[62974]: DEBUG nova.compute.manager [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Refreshing instance network info cache due to event network-changed-9cfe7952-9fc7-4153-bdf7-356ebd06114e. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 775.962075] env[62974]: DEBUG oslo_concurrency.lockutils [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] Acquiring lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.962075] env[62974]: DEBUG oslo_concurrency.lockutils [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] Acquired lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.962380] env[62974]: DEBUG nova.network.neutron [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Refreshing network info cache for port 9cfe7952-9fc7-4153-bdf7-356ebd06114e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 775.995927] env[62974]: DEBUG oslo_concurrency.lockutils [req-78fe0eda-d4e1-4084-accf-e310a242c686 req-dd0c84bd-c070-4b61-9946-372f23000190 service nova] Releasing lock "refresh_cache-6928b412-e8cb-42fb-bc47-dc8498f12ad1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.052873] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Successfully created port: 2fbbc340-11dd-482a-90f2-f281ec84a833 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.158973] env[62974]: DEBUG nova.network.neutron [None req-39837939-280b-4b89-99da-16c700eb0004 
tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.188152] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 776.207858] env[62974]: DEBUG oslo_vmware.api [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225099} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.207858] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.207858] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 776.207858] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.207858] env[62974]: INFO nova.compute.manager [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Took 0.62 seconds to destroy the instance on the hypervisor. [ 776.208202] env[62974]: DEBUG oslo.service.loopingcall [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.208202] env[62974]: DEBUG nova.compute.manager [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 776.208202] env[62974]: DEBUG nova.network.neutron [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.276101] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654271, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.416747] env[62974]: DEBUG nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 776.461568] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e622df3a-13cf-43a4-b831-a62230bb023d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.471975] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c782ecf-2654-4cdc-ae3a-7ac59655e4cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.509830] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a479f8-a176-4d4b-93ec-bb9b4626d51a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.517586] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f62fd4-c363-43e3-9db3-8933c7b604c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.523642] env[62974]: DEBUG nova.network.neutron [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance_info_cache with network_info: [{"id": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "address": "fa:16:3e:5f:6a:8c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8424609-cf", "ovs_interfaceid": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.536163] env[62974]: DEBUG nova.compute.provider_tree [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.773676] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654271, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.831449] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Volume attach. Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 776.831738] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535360', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'name': 'volume-0e10d030-eb6c-4b13-954b-b185529be495', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3bcbcf35-294e-4d58-b002-cb84db4316d5', 'attached_at': '', 'detached_at': '', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'serial': '0e10d030-eb6c-4b13-954b-b185529be495'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 776.832714] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9328524-ef72-4878-92d3-c0397cdc4d19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.858304] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb625857-4258-462c-97ec-b1087c388987 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.887901] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] volume-0e10d030-eb6c-4b13-954b-b185529be495/volume-0e10d030-eb6c-4b13-954b-b185529be495.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 776.888282] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-b5e487b0-f999-44c1-a837-8489675c8540 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.909425] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 776.909425] env[62974]: value = "task-2654272" [ 776.909425] env[62974]: _type = "Task" [ 776.909425] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.919913] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654272, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.951724] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.026659] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.030409] env[62974]: DEBUG nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Instance network_info: |[{"id": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "address": "fa:16:3e:5f:6a:8c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8424609-cf", "ovs_interfaceid": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 777.031121] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: 
af370de1-e4d7-4312-bc72-c6398eeaf2ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:6a:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8424609-cf9e-4474-a78b-3d28dbdd7cb0', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.042292] env[62974]: DEBUG oslo.service.loopingcall [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.043332] env[62974]: DEBUG nova.scheduler.client.report [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.047898] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.048538] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1296c788-2283-487d-a4da-536de97d153d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.066609] env[62974]: DEBUG nova.network.neutron [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Updated VIF entry in instance network info cache for port 9cfe7952-9fc7-4153-bdf7-356ebd06114e. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.067295] env[62974]: DEBUG nova.network.neutron [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Updating instance_info_cache with network_info: [{"id": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "address": "fa:16:3e:21:9e:8b", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfe7952-9f", "ovs_interfaceid": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.074554] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.074554] env[62974]: value = "task-2654273" [ 777.074554] env[62974]: _type = "Task" [ 777.074554] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.087629] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654273, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.196574] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 777.201920] env[62974]: DEBUG nova.compute.manager [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Received event network-vif-plugged-f8424609-cf9e-4474-a78b-3d28dbdd7cb0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 777.201920] env[62974]: DEBUG oslo_concurrency.lockutils [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] Acquiring lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.202138] env[62974]: DEBUG oslo_concurrency.lockutils [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.202975] env[62974]: DEBUG oslo_concurrency.lockutils [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.202975] env[62974]: DEBUG nova.compute.manager [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] No waiting events found dispatching network-vif-plugged-f8424609-cf9e-4474-a78b-3d28dbdd7cb0 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 777.202975] env[62974]: WARNING nova.compute.manager [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Received unexpected event network-vif-plugged-f8424609-cf9e-4474-a78b-3d28dbdd7cb0 for instance with vm_state building and task_state spawning. [ 777.202975] env[62974]: DEBUG nova.compute.manager [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Received event network-changed-f8424609-cf9e-4474-a78b-3d28dbdd7cb0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 777.203180] env[62974]: DEBUG nova.compute.manager [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Refreshing instance network info cache due to event network-changed-f8424609-cf9e-4474-a78b-3d28dbdd7cb0. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 777.203225] env[62974]: DEBUG oslo_concurrency.lockutils [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] Acquiring lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.203351] env[62974]: DEBUG oslo_concurrency.lockutils [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] Acquired lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.203511] env[62974]: DEBUG nova.network.neutron [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Refreshing network info cache for port f8424609-cf9e-4474-a78b-3d28dbdd7cb0 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.207222] env[62974]: DEBUG nova.network.neutron [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.236013] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 777.236686] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.236686] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.236686] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.236873] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 
tempest-ListServersNegativeTestJSON-71281647-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.236968] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.237191] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.237349] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 777.237515] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.237673] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.237843] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.239024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb3dad6-4138-46c1-9d47-54c6f468809e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.250089] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af6bca8-ebd6-486d-b461-6db2c0765671 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.276658] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654271, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.392989] env[62974]: DEBUG nova.compute.manager [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Received event network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 777.393229] env[62974]: DEBUG nova.compute.manager [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing instance network info cache due to event network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 777.393524] env[62974]: DEBUG oslo_concurrency.lockutils [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] Acquiring lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.393945] env[62974]: DEBUG oslo_concurrency.lockutils [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] Acquired lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.394110] env[62974]: DEBUG nova.network.neutron [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.421995] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654272, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.474909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.474909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.550201] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.380s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.553344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.043s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.554981] env[62974]: INFO nova.compute.claims [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.570064] env[62974]: DEBUG oslo_concurrency.lockutils [req-087bc313-0d14-4243-9d5d-fca377c27dfd req-b572d083-cead-4e3e-9997-d350dee1f508 service nova] Releasing lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.571225] env[62974]: INFO nova.scheduler.client.report [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleted allocations for instance bcacc508-b910-4144-bf0b-454b0928ca71 [ 777.585421] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654273, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.710089] env[62974]: INFO nova.compute.manager [-] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Took 1.50 seconds to deallocate network for instance. [ 777.778702] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654271, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.659477} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.781155] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b/6cee3cf6-2105-40f7-b7f2-5bd38a01a08b.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.781392] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.781682] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eae39bca-c231-4de9-9afc-45b30fd516fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.789198] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 777.789198] env[62974]: value = "task-2654274" [ 777.789198] env[62974]: _type = "Task" [ 777.789198] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.797389] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.890124] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Successfully updated port: 2fbbc340-11dd-482a-90f2-f281ec84a833 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.929107] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654272, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.956212] env[62974]: DEBUG nova.network.neutron [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updated VIF entry in instance network info cache for port f8424609-cf9e-4474-a78b-3d28dbdd7cb0. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.956982] env[62974]: DEBUG nova.network.neutron [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance_info_cache with network_info: [{"id": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "address": "fa:16:3e:5f:6a:8c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8424609-cf", "ovs_interfaceid": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.077300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-69849e2f-c018-4e46-8f27-c9099559ea8d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "bcacc508-b910-4144-bf0b-454b0928ca71" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.359s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.088788] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654273, 'name': CreateVM_Task, 'duration_secs': 0.603477} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.088788] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.089571] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.089838] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.090352] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 778.091667] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfe9f174-4274-4c65-8996-5ee301c577c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.097467] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 778.097467] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e5fd82-48b7-7c57-6f18-7855f54949c2" [ 778.097467] env[62974]: _type = "Task" [ 778.097467] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.108403] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e5fd82-48b7-7c57-6f18-7855f54949c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.144306] env[62974]: DEBUG nova.network.neutron [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updated VIF entry in instance network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 778.144782] env[62974]: DEBUG nova.network.neutron [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.217099] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.298463] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139746} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.298739] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 778.299525] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8ea135-a7c5-45ed-80a9-fd606604f336 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.321774] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b/6cee3cf6-2105-40f7-b7f2-5bd38a01a08b.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.322073] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7307811-6427-41f1-a677-9d731fa2ea4f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.341589] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 778.341589] env[62974]: value = "task-2654275" [ 778.341589] env[62974]: _type = "Task" [ 778.341589] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.349902] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654275, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.393781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "refresh_cache-5d6a072e-dba7-461d-9d41-8ca003b31102" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.393961] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "refresh_cache-5d6a072e-dba7-461d-9d41-8ca003b31102" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.394133] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.420337] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654272, 'name': ReconfigVM_Task, 'duration_secs': 1.270209} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.420621] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Reconfigured VM instance instance-00000036 to attach disk [datastore2] volume-0e10d030-eb6c-4b13-954b-b185529be495/volume-0e10d030-eb6c-4b13-954b-b185529be495.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.425261] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4154dd3-be38-4d49-9ba4-5f3b252741f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.440533] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 778.440533] env[62974]: value = "task-2654276" [ 778.440533] env[62974]: _type = "Task" [ 778.440533] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.448913] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654276, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.459921] env[62974]: DEBUG oslo_concurrency.lockutils [req-5058fb56-1203-4f99-b872-be17d2feec21 req-426528a5-8462-4719-9030-5032f8e066ef service nova] Releasing lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.611875] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e5fd82-48b7-7c57-6f18-7855f54949c2, 'name': SearchDatastore_Task, 'duration_secs': 0.021754} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.611875] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.612122] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.612360] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.612507] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.612682] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.612964] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9ce6e4f-cb30-4c8b-98b4-6dc763adb966 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.625846] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.625846] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.626143] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec1cb4de-757b-499b-afd0-ade59fb10e11 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.632889] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 778.632889] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fa1f78-1ffb-6fe7-49a0-81d720bc65b6" [ 778.632889] env[62974]: _type = "Task" [ 778.632889] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.642987] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fa1f78-1ffb-6fe7-49a0-81d720bc65b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.647441] env[62974]: DEBUG oslo_concurrency.lockutils [req-d87ae51a-db6c-49fb-9b08-db4098dd5864 req-fb23bcc3-41f9-4ba4-8002-5fee7ad9435f service nova] Releasing lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.851469] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.929858] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.952078] env[62974]: DEBUG oslo_vmware.api [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654276, 'name': ReconfigVM_Task, 'duration_secs': 0.169847} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.957484] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535360', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'name': 'volume-0e10d030-eb6c-4b13-954b-b185529be495', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3bcbcf35-294e-4d58-b002-cb84db4316d5', 'attached_at': '', 'detached_at': '', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'serial': '0e10d030-eb6c-4b13-954b-b185529be495'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 779.107783] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c221affc-d8cd-4866-a27b-b2ffab794977 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.114873] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e3bb0b-feed-4b92-bc04-9252ad0bad2e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.148864] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Updating instance_info_cache with network_info: [{"id": "2fbbc340-11dd-482a-90f2-f281ec84a833", "address": "fa:16:3e:d0:b7:10", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fbbc340-11", "ovs_interfaceid": "2fbbc340-11dd-482a-90f2-f281ec84a833", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.153469] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbac6895-d482-4c7f-b6a7-c3fa8908b25e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.164506] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2002bd93-4aa6-4cfa-bf92-d701b4be706a {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.168167] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fa1f78-1ffb-6fe7-49a0-81d720bc65b6, 'name': SearchDatastore_Task, 'duration_secs': 0.012668} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.169797] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-916deaad-0ca6-41f2-80ef-cb7985a2b60f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.180017] env[62974]: DEBUG nova.compute.provider_tree [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.184308] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 779.184308] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5ab74-badd-5ea1-f709-934a53c5f2b8" [ 779.184308] env[62974]: _type = "Task" [ 779.184308] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.193334] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5ab74-badd-5ea1-f709-934a53c5f2b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.228911] env[62974]: DEBUG nova.compute.manager [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Received event network-vif-deleted-58ed2814-e050-4f6f-9847-7912e525e286 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 779.229133] env[62974]: DEBUG nova.compute.manager [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Received event network-vif-plugged-2fbbc340-11dd-482a-90f2-f281ec84a833 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 779.229371] env[62974]: DEBUG oslo_concurrency.lockutils [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] Acquiring lock "5d6a072e-dba7-461d-9d41-8ca003b31102-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.229669] env[62974]: DEBUG oslo_concurrency.lockutils [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.229792] env[62974]: DEBUG oslo_concurrency.lockutils [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.229958] env[62974]: DEBUG nova.compute.manager [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] No waiting events found dispatching network-vif-plugged-2fbbc340-11dd-482a-90f2-f281ec84a833 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.230161] env[62974]: WARNING nova.compute.manager [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Received unexpected event network-vif-plugged-2fbbc340-11dd-482a-90f2-f281ec84a833 for instance with vm_state building and task_state spawning. [ 779.230327] env[62974]: DEBUG nova.compute.manager [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Received event network-changed-2fbbc340-11dd-482a-90f2-f281ec84a833 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 779.230480] env[62974]: DEBUG nova.compute.manager [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Refreshing instance network info cache due to event network-changed-2fbbc340-11dd-482a-90f2-f281ec84a833. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 779.230659] env[62974]: DEBUG oslo_concurrency.lockutils [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] Acquiring lock "refresh_cache-5d6a072e-dba7-461d-9d41-8ca003b31102" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.351203] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654275, 'name': ReconfigVM_Task, 'duration_secs': 0.679539} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.351537] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b/6cee3cf6-2105-40f7-b7f2-5bd38a01a08b.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.352199] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84a7ecef-7ee8-4738-aa13-bdc7042cd1e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.358487] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 779.358487] env[62974]: value = "task-2654277" [ 779.358487] env[62974]: _type = "Task" [ 779.358487] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.366663] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654277, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.656839] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "refresh_cache-5d6a072e-dba7-461d-9d41-8ca003b31102" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.657224] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Instance network_info: |[{"id": "2fbbc340-11dd-482a-90f2-f281ec84a833", "address": "fa:16:3e:d0:b7:10", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fbbc340-11", "ovs_interfaceid": "2fbbc340-11dd-482a-90f2-f281ec84a833", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 779.657471] env[62974]: DEBUG oslo_concurrency.lockutils [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] Acquired lock "refresh_cache-5d6a072e-dba7-461d-9d41-8ca003b31102" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.657652] env[62974]: DEBUG nova.network.neutron [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Refreshing network info cache for port 2fbbc340-11dd-482a-90f2-f281ec84a833 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.659016] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:b7:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '535b175f-71d3-4226-81ab-ca253f27fedd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2fbbc340-11dd-482a-90f2-f281ec84a833', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.667318] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 
tempest-ListServersNegativeTestJSON-71281647-project-member] Creating folder: Project (e728dfba54cd4779aad4879fb213a81b). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.668463] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a6f39cb-04a0-4af3-8c48-0c599bf960c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.678420] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Created folder: Project (e728dfba54cd4779aad4879fb213a81b) in parent group-v535199. [ 779.678642] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Creating folder: Instances. Parent ref: group-v535363. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.678878] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae3760b3-9d4b-4f19-9e5e-6ca3fc4f30b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.683705] env[62974]: DEBUG nova.scheduler.client.report [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 779.688226] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Created folder: Instances in parent group-v535363. [ 779.688406] env[62974]: DEBUG oslo.service.loopingcall [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.691964] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 779.692402] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c1f8a18-0dea-4e07-8c4e-ab6fd12bffed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.715202] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5ab74-badd-5ea1-f709-934a53c5f2b8, 'name': SearchDatastore_Task, 'duration_secs': 0.010874} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.716549] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.716815] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] af370de1-e4d7-4312-bc72-c6398eeaf2ed/af370de1-e4d7-4312-bc72-c6398eeaf2ed.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.717073] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.717073] env[62974]: value = "task-2654280" [ 779.717073] env[62974]: _type = "Task" [ 779.717073] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.717263] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f8e9a61-18d5-4db4-85e5-3e82340d5455 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.727439] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654280, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.729050] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 779.729050] env[62974]: value = "task-2654281" [ 779.729050] env[62974]: _type = "Task" [ 779.729050] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.736516] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654281, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.868353] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654277, 'name': Rename_Task, 'duration_secs': 0.404396} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.869303] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.869869] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68868fd8-28d0-45e7-9849-66478242bc31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.877621] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 779.877621] env[62974]: value = "task-2654282" [ 779.877621] env[62974]: _type = "Task" [ 779.877621] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.885625] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654282, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.896042] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521a0bbd-68a9-1acf-729a-8682e17570e1/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 779.896042] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8930f6fc-25ea-429d-8796-6d695136fed6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.902928] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521a0bbd-68a9-1acf-729a-8682e17570e1/disk-0.vmdk is in state: ready. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 779.903173] env[62974]: ERROR oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521a0bbd-68a9-1acf-729a-8682e17570e1/disk-0.vmdk due to incomplete transfer. [ 779.903652] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2446e219-f0df-4755-b6b5-24b080ca0920 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.913219] env[62974]: DEBUG oslo_vmware.rw_handles [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521a0bbd-68a9-1acf-729a-8682e17570e1/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 779.913541] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Uploaded image d1279753-d482-4ec2-9496-86a886b386a5 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 779.916202] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 779.916517] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fd9143e7-86bc-4e2c-816a-b809e71d04b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.923407] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 779.923407] env[62974]: value = "task-2654283" [ 779.923407] env[62974]: _type = "Task" [ 779.923407] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.932515] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654283, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.994722] env[62974]: DEBUG nova.objects.instance [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'flavor' on Instance uuid 3bcbcf35-294e-4d58-b002-cb84db4316d5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.194053] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.194643] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 780.197609] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 28.241s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.197798] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.199348] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 780.199348] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.071s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.199348] env[62974]: DEBUG nova.objects.instance [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lazy-loading 'resources' on Instance uuid 4967d5be-6cd4-4f23-aca4-d9ae11112369 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.201426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d5cec6-7549-4b27-9e10-34d239c7dff2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.213980] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30f15a3-ec33-4cd6-a23a-f78fe1326387 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.242415] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6af91c1-0ed1-4844-bf3c-b902a3f7305d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.250352] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654280, 'name': CreateVM_Task, 'duration_secs': 0.522088} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.252303] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.261748] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.261931] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.262622] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 780.263017] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654281, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.263488] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-386045b9-dbca-4585-8690-421efd2781c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.266571] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbb227c-5fc2-403b-a3a7-bdab363245f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.274714] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 780.274714] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523b94f0-fa7d-baf8-1a16-5da14728d5a5" [ 780.274714] env[62974]: _type = "Task" [ 780.274714] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.304866] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178989MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 780.305049] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.313596] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523b94f0-fa7d-baf8-1a16-5da14728d5a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01057} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.314113] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.314262] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.314449] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.314591] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.314769] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.315024] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5cd5531-8564-4c11-81b2-20886677cb9d {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.323482] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.323652] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 780.326829] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30d815dc-c29e-48b9-8acb-9248fc000270 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.332131] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 780.332131] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522eda39-0d0a-d3f9-5984-1c8373cc4044" [ 780.332131] env[62974]: _type = "Task" [ 780.332131] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.339913] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522eda39-0d0a-d3f9-5984-1c8373cc4044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.388350] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654282, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.434039] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654283, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.459172] env[62974]: DEBUG nova.network.neutron [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Updated VIF entry in instance network info cache for port 2fbbc340-11dd-482a-90f2-f281ec84a833. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.459550] env[62974]: DEBUG nova.network.neutron [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Updating instance_info_cache with network_info: [{"id": "2fbbc340-11dd-482a-90f2-f281ec84a833", "address": "fa:16:3e:d0:b7:10", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fbbc340-11", "ovs_interfaceid": "2fbbc340-11dd-482a-90f2-f281ec84a833", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.499939] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38697fb2-55e4-4e9e-bf3f-599f7cd7bec2 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.326s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.702031] env[62974]: DEBUG nova.compute.utils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 780.704191] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 780.704191] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 780.748506] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654281, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560017} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.751227] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] af370de1-e4d7-4312-bc72-c6398eeaf2ed/af370de1-e4d7-4312-bc72-c6398eeaf2ed.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 780.751635] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.752419] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d37c4bd-093c-40e4-9b58-d7c3b5d971f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.762387] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 780.762387] env[62974]: value = "task-2654284" [ 780.762387] env[62974]: _type = "Task" [ 780.762387] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.769280] env[62974]: DEBUG nova.policy [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b18638d1ce6f4d2bb1f1e8117deba2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e728dfba54cd4779aad4879fb213a81b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 780.771574] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.772045] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.772932] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 
tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "3bcbcf35-294e-4d58-b002-cb84db4316d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.772932] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.772932] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.781272] env[62974]: INFO nova.compute.manager [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Terminating instance [ 780.782848] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.846028] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522eda39-0d0a-d3f9-5984-1c8373cc4044, 'name': SearchDatastore_Task, 'duration_secs': 0.008985} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.846867] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70ac6ae9-ff72-4e9c-a766-5cc4701b8b5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.853365] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 780.853365] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b74fe-5168-a8b0-4329-f045c2accb16" [ 780.853365] env[62974]: _type = "Task" [ 780.853365] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.867868] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b74fe-5168-a8b0-4329-f045c2accb16, 'name': SearchDatastore_Task, 'duration_secs': 0.011708} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.868149] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.868427] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 5d6a072e-dba7-461d-9d41-8ca003b31102/5d6a072e-dba7-461d-9d41-8ca003b31102.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 780.868681] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-286f855d-0ed1-4e88-84b1-7f74fc8b80b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.877253] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 780.877253] env[62974]: value = "task-2654285" [ 780.877253] env[62974]: _type = "Task" [ 780.877253] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.892975] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654282, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.893255] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.936858] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654283, 'name': Destroy_Task, 'duration_secs': 0.941325} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.937168] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Destroyed the VM [ 780.937471] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 780.937888] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8272c4d2-4724-4fef-b7f2-2895b460096d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.944500] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 780.944500] env[62974]: value = "task-2654286" [ 780.944500] env[62974]: _type = "Task" [ 780.944500] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.956497] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654286, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.963818] env[62974]: DEBUG oslo_concurrency.lockutils [req-da135103-3093-4145-a5cb-a8a25b728875 req-21e8fd33-e9d0-4afe-ac11-4013d238ec43 service nova] Releasing lock "refresh_cache-5d6a072e-dba7-461d-9d41-8ca003b31102" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.095585] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Successfully created port: a4dcb530-120c-4113-98f3-faa4e893012d {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.207784] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 781.274366] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06752} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.277300] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.278447] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186bb017-48b7-4918-b4f2-ce4bb8ea4305 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.295659] env[62974]: DEBUG nova.compute.manager [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 781.295897] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.305211] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] af370de1-e4d7-4312-bc72-c6398eeaf2ed/af370de1-e4d7-4312-bc72-c6398eeaf2ed.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.308440] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e94fdc05-dfdf-4352-be25-529ae264a403 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.310336] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e272921-12ce-4bf7-958c-076bffe0fc0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.331590] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 781.331590] env[62974]: value = "task-2654288" [ 781.331590] env[62974]: _type = "Task" [ 781.331590] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.333049] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 781.333049] env[62974]: value = "task-2654287" [ 781.333049] env[62974]: _type = "Task" [ 781.333049] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.342885] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcd8a43-2afa-4884-b953-7ae9c9daf76d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.350675] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654288, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.356039] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654287, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.359341] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db20578-ac18-4ef0-a063-4b41bc173923 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.398746] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d069afc-78c0-4f45-ab27-6ce2de7b6a81 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.406898] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522814} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.411909] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 5d6a072e-dba7-461d-9d41-8ca003b31102/5d6a072e-dba7-461d-9d41-8ca003b31102.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 781.412295] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 781.412726] env[62974]: DEBUG oslo_vmware.api [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654282, 'name': PowerOnVM_Task, 'duration_secs': 1.077262} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.413045] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-164a8dfa-7efc-4e13-9a97-a4944d84442e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.415287] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.415789] env[62974]: INFO nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Took 9.83 seconds to spawn the instance on the hypervisor. [ 781.415789] env[62974]: DEBUG nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.417283] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6393dfe9-e444-43d8-93b1-b49100cc7d8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.421269] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be315c27-ef3d-43b3-94e8-84bac4e6dd1c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.440991] env[62974]: DEBUG nova.compute.provider_tree [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.441191] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 781.441191] env[62974]: value = "task-2654289" [ 781.441191] env[62974]: _type = "Task" [ 781.441191] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.453381] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654289, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.461664] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654286, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.845996] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654287, 'name': PowerOffVM_Task, 'duration_secs': 0.217215} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.848937] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 781.849195] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 781.849449] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535360', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'name': 'volume-0e10d030-eb6c-4b13-954b-b185529be495', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3bcbcf35-294e-4d58-b002-cb84db4316d5', 'attached_at': '', 'detached_at': '', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'serial': '0e10d030-eb6c-4b13-954b-b185529be495'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 781.849746] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654288, 'name': ReconfigVM_Task, 'duration_secs': 0.331167} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.850451] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a7f56a-6403-42ed-88fb-60e6026987f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.852768] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Reconfigured VM instance instance-0000003a to attach disk [datastore2] af370de1-e4d7-4312-bc72-c6398eeaf2ed/af370de1-e4d7-4312-bc72-c6398eeaf2ed.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.853370] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6fe83c76-bb8f-4e18-9f63-a5fc52961c58 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.859292] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 781.859292] env[62974]: value = "task-2654290" [ 781.859292] env[62974]: _type = "Task" [ 781.859292] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.878559] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e205df-a1b2-4ef7-b026-0fa80eeecf70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.886684] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654290, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.889028] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91134724-80ef-4df7-ac1f-33da60503e3a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.910222] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6860df2-6244-4458-a2c4-0a31b150d0de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.925677] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] The volume has not been displaced from its original location: [datastore2] volume-0e10d030-eb6c-4b13-954b-b185529be495/volume-0e10d030-eb6c-4b13-954b-b185529be495.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 781.931097] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Reconfiguring VM instance instance-00000036 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 781.931501] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27bcd0d5-b07d-4b66-8128-aca2806cb220 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.945113] env[62974]: DEBUG nova.scheduler.client.report [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 781.960602] env[62974]: INFO nova.compute.manager [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Took 40.70 seconds to build instance. [ 781.963550] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 781.963550] env[62974]: value = "task-2654291" [ 781.963550] env[62974]: _type = "Task" [ 781.963550] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.974729] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654289, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068978} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.974962] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654286, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.976192] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.977027] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb37caa7-bb2e-4e95-ba8b-f312d1fc6295 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.984144] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654291, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.006860] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 5d6a072e-dba7-461d-9d41-8ca003b31102/5d6a072e-dba7-461d-9d41-8ca003b31102.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 782.007571] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-761b4cc9-fc2a-4c52-9d5a-f8ff24b3af82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.030285] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 782.030285] env[62974]: value = "task-2654292" [ 782.030285] env[62974]: _type = "Task" [ 782.030285] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.038236] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654292, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.213920] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] Acquiring lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.213920] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] Acquired lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.214024] env[62974]: DEBUG nova.network.neutron [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.218782] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 782.247846] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.248147] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.248360] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.248585] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Flavor pref 0:0:0 {{(pid=62974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.248749] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.248908] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.249169] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 782.249454] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 782.249664] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.249849] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.250043] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.250983] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1d4c0b-f4eb-4d50-aa3a-70c35d1aa34a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.260985] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99940e75-2f37-4f77-b3a8-08bb71ac1854 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.385325] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654290, 'name': Rename_Task, 'duration_secs': 0.153803} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.385608] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.385855] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ab204f7-c159-46ec-b475-7ae8021c410a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.392235] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 782.392235] env[62974]: value = "task-2654293" [ 782.392235] env[62974]: _type = "Task" [ 782.392235] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.400217] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.454173] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.255s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.455688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.882s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.457260] env[62974]: INFO nova.compute.claims [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.468378] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42b6d160-437f-4c7e-8aa9-2a86d3c0b53f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.380s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.468737] env[62974]: DEBUG oslo_vmware.api [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654286, 'name': RemoveSnapshot_Task, 'duration_secs': 1.11464} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.473076] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 782.473326] env[62974]: INFO nova.compute.manager [None req-47605d96-b675-4f6c-a002-bdbd005cfd41 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Took 16.23 seconds to snapshot the instance on the hypervisor. [ 782.483475] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654291, 'name': ReconfigVM_Task, 'duration_secs': 0.224244} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.485758] env[62974]: INFO nova.scheduler.client.report [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Deleted allocations for instance 4967d5be-6cd4-4f23-aca4-d9ae11112369 [ 782.487021] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Reconfigured VM instance instance-00000036 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 782.495073] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b4aa661-8c4d-4cea-be32-9da0f0609deb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.514115] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 782.514115] env[62974]: value = "task-2654294" [ 782.514115] env[62974]: _type = "Task" [ 782.514115] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.527792] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654294, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.538658] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654292, 'name': ReconfigVM_Task, 'duration_secs': 0.295405} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.538917] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 5d6a072e-dba7-461d-9d41-8ca003b31102/5d6a072e-dba7-461d-9d41-8ca003b31102.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.540031] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b07b06b-5a7a-4562-bc26-02cc2abe3d90 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.545775] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 782.545775] env[62974]: value = "task-2654295" [ 782.545775] env[62974]: _type = "Task" [ 782.545775] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.554898] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654295, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.630825] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Successfully updated port: a4dcb530-120c-4113-98f3-faa4e893012d {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 782.705588] env[62974]: DEBUG nova.compute.manager [req-4beb3d18-d980-4078-9a67-8f785bebc009 req-5a516a44-7ed0-4905-b43f-eda7e8e0bad4 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Received event network-vif-plugged-a4dcb530-120c-4113-98f3-faa4e893012d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 782.705841] env[62974]: DEBUG oslo_concurrency.lockutils [req-4beb3d18-d980-4078-9a67-8f785bebc009 req-5a516a44-7ed0-4905-b43f-eda7e8e0bad4 service nova] Acquiring lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.706316] env[62974]: DEBUG oslo_concurrency.lockutils [req-4beb3d18-d980-4078-9a67-8f785bebc009 req-5a516a44-7ed0-4905-b43f-eda7e8e0bad4 service nova] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.706733] env[62974]: DEBUG oslo_concurrency.lockutils [req-4beb3d18-d980-4078-9a67-8f785bebc009 req-5a516a44-7ed0-4905-b43f-eda7e8e0bad4 service nova] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.707053] env[62974]: DEBUG nova.compute.manager [req-4beb3d18-d980-4078-9a67-8f785bebc009 req-5a516a44-7ed0-4905-b43f-eda7e8e0bad4 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] No waiting events found dispatching network-vif-plugged-a4dcb530-120c-4113-98f3-faa4e893012d {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 782.707287] env[62974]: WARNING nova.compute.manager [req-4beb3d18-d980-4078-9a67-8f785bebc009 req-5a516a44-7ed0-4905-b43f-eda7e8e0bad4 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Received unexpected event network-vif-plugged-a4dcb530-120c-4113-98f3-faa4e893012d for instance with vm_state building and task_state spawning. [ 782.905525] env[62974]: DEBUG oslo_vmware.api [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654293, 'name': PowerOnVM_Task, 'duration_secs': 0.497453} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.905832] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.905984] env[62974]: INFO nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Took 8.44 seconds to spawn the instance on the hypervisor. 
[ 782.906187] env[62974]: DEBUG nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 782.907015] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5dbf8a-70dd-4cb8-b7e0-c0dcb2a513be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.935521] env[62974]: DEBUG nova.network.neutron [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Updating instance_info_cache with network_info: [{"id": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "address": "fa:16:3e:21:9e:8b", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfe7952-9f", "ovs_interfaceid": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.978644] env[62974]: DEBUG nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.010673] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4402299e-07e6-44be-babc-42dae050ffc4 tempest-ServersAaction247Test-224573481 tempest-ServersAaction247Test-224573481-project-member] Lock "4967d5be-6cd4-4f23-aca4-d9ae11112369" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.120s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.026157] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654294, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.056112] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654295, 'name': Rename_Task, 'duration_secs': 0.173989} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.056397] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 783.056636] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8315db5a-7df5-4de2-ade0-5622cab5960f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.062704] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 783.062704] env[62974]: value = "task-2654296" [ 783.062704] env[62974]: _type = "Task" [ 783.062704] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.070572] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.133905] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "refresh_cache-d6ce3f68-a757-48bc-abeb-49c3aacdf465" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.134071] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "refresh_cache-d6ce3f68-a757-48bc-abeb-49c3aacdf465" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.134296] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.424027] env[62974]: INFO nova.compute.manager [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Took 38.27 seconds to build instance. 
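[editor's note] Several entries above follow the same shape: an oslo.vmware task reference is returned (ReconfigVM_Task, Rename_Task, PowerOnVM_Task), the caller logs "Waiting for the task ... to complete", progress percentages are polled, and the task is finally reported "completed successfully" with a duration. The snippet below is a generic poll-until-done loop that mimics that behaviour against a stand-in task object; it is not oslo.vmware's wait_for_task, just an illustration of the polling contract the log reflects.

import time

class FakeTask:
    """Stand-in for a vCenter task handle; advances a bit on each poll."""
    def __init__(self, name):
        self.name = name
        self.progress = 0
        self.state = "running"

    def poll(self):
        self.progress = min(100, self.progress + 34)
        if self.progress >= 100:
            self.state = "success"
        return self.progress, self.state

def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it finishes, reporting progress like the entries above."""
    start = time.monotonic()
    while True:
        progress, state = task.poll()
        if state == "success":
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully in {duration:.3f}s")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.name} failed")
        print(f"Task {task.name} progress is {progress}%")
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("ReconfigVM_Task"))
[end editor's note]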
[ 783.441207] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] Releasing lock "refresh_cache-6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.441207] env[62974]: DEBUG nova.compute.manager [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Inject network info {{(pid=62974) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 783.441403] env[62974]: DEBUG nova.compute.manager [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] network_info to inject: |[{"id": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "address": "fa:16:3e:21:9e:8b", "network": {"id": "02ca5c6a-3296-469c-bb93-ca4a9fb9d034", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1720243746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc2dc33e40e549d1a025e4b883c4dfb1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfe7952-9f", "ovs_interfaceid": "9cfe7952-9fc7-4153-bdf7-356ebd06114e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 783.448977] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Reconfiguring VM instance to set the machine id {{(pid=62974) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 783.448977] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97ced0b6-1596-468f-afc3-2e270dec7f64 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.465690] env[62974]: DEBUG oslo_vmware.api [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] Waiting for the task: (returnval){ [ 783.465690] env[62974]: value = "task-2654297" [ 783.465690] env[62974]: _type = "Task" [ 783.465690] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.477722] env[62974]: DEBUG oslo_vmware.api [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] Task: {'id': task-2654297, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.502430] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.527191] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654294, 'name': ReconfigVM_Task, 'duration_secs': 0.764796} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.527497] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535360', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'name': 'volume-0e10d030-eb6c-4b13-954b-b185529be495', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3bcbcf35-294e-4d58-b002-cb84db4316d5', 'attached_at': '', 'detached_at': '', 'volume_id': '0e10d030-eb6c-4b13-954b-b185529be495', 'serial': '0e10d030-eb6c-4b13-954b-b185529be495'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 783.527784] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.528637] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6849dea3-94f3-4b8d-89e3-4cbc4606e5c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.538037] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 783.538335] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-665a4c9d-6f38-49db-904b-1d1691c85d89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.573394] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 
tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654296, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.609775] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 783.609981] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 783.610181] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleting the datastore file [datastore1] 3bcbcf35-294e-4d58-b002-cb84db4316d5 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 783.610478] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22db5bd1-3d6f-4cb2-825d-41f59e6aa2cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.616921] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 783.616921] env[62974]: value = "task-2654299" [ 783.616921] env[62974]: _type = "Task" [ 783.616921] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.625243] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654299, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.671738] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.825842] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Updating instance_info_cache with network_info: [{"id": "a4dcb530-120c-4113-98f3-faa4e893012d", "address": "fa:16:3e:af:d2:7b", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4dcb530-12", "ovs_interfaceid": "a4dcb530-120c-4113-98f3-faa4e893012d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.926702] env[62974]: DEBUG oslo_concurrency.lockutils [None req-39837939-280b-4b89-99da-16c700eb0004 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.996s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.975889] env[62974]: DEBUG oslo_vmware.api [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] Task: {'id': task-2654297, 'name': ReconfigVM_Task, 'duration_secs': 0.22501} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.976186] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b709a06c-bbc8-4e59-b7ec-c13efa792ab8 tempest-ServersAdminTestJSON-1382642963 tempest-ServersAdminTestJSON-1382642963-project-admin] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Reconfigured VM instance to set the machine id {{(pid=62974) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 784.039205] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4aa4bb-a213-4e7a-9b01-ad9419d99fbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.048739] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0efe83-56f0-4536-892c-1cbc57b2cb66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.084196] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda45e02-5baa-4ac6-8556-cd79e1896a9a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.092132] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654296, 'name': PowerOnVM_Task, 'duration_secs': 0.723217} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.094253] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 784.094466] env[62974]: INFO nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Took 6.90 seconds to spawn the instance on the hypervisor. 
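[editor's note] The refresh_cache-<uuid> Acquiring/Acquired/Releasing entries (lockutils.py:310/313/331) and the "acquired by ... waited" / "released by ... held" pairs above come from oslo.concurrency's named locks. Below is a minimal sketch of the two usual idioms, assuming oslo.concurrency is importable (it is part of the venv the log paths point at); the lock names and function bodies are illustrative, not Nova code.

from oslo_concurrency import lockutils

# Context-manager form: serialize access to a shared cache entry by lock name.
def refresh_network_cache(instance_uuid):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        # ... rebuild the instance's network info cache here ...
        pass

# Decorator form: every call to this function is serialized on the same name,
# which is what produces the "waited N s" / "held N s" pairs in the log.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # ... update the resource tracker for the instance ...
    return instance_uuid

if __name__ == "__main__":
    refresh_network_cache("d6ce3f68-a757-48bc-abeb-49c3aacdf465")
    claim_resources("1c7fabf7-ba82-4628-9016-b0f198add99a")
[end editor's note]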
[ 784.094650] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 784.095451] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc5b4f2-bf72-489a-a0d1-6988595a57c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.098893] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640f5944-8653-4a3f-9898-d6776bbddfcb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.118445] env[62974]: DEBUG nova.compute.provider_tree [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.128477] env[62974]: DEBUG oslo_vmware.api [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156714} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.129481] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.129691] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.131245] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.131317] env[62974]: INFO nova.compute.manager [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Took 2.84 seconds to destroy the instance on the hypervisor. [ 784.131574] env[62974]: DEBUG oslo.service.loopingcall [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.132090] env[62974]: DEBUG nova.compute.manager [-] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.132203] env[62974]: DEBUG nova.network.neutron [-] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.331686] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "refresh_cache-d6ce3f68-a757-48bc-abeb-49c3aacdf465" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.332090] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Instance network_info: |[{"id": "a4dcb530-120c-4113-98f3-faa4e893012d", "address": "fa:16:3e:af:d2:7b", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4dcb530-12", "ovs_interfaceid": "a4dcb530-120c-4113-98f3-faa4e893012d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 784.332577] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:d2:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '535b175f-71d3-4226-81ab-ca253f27fedd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4dcb530-120c-4113-98f3-faa4e893012d', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.341027] env[62974]: DEBUG oslo.service.loopingcall [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.341027] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.341027] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-426f5f6c-b6fe-475e-9609-83018bd81676 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.362725] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.362725] env[62974]: value = "task-2654300" [ 784.362725] env[62974]: _type = "Task" [ 784.362725] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.371412] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654300, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.429521] env[62974]: DEBUG nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 784.626479] env[62974]: DEBUG nova.scheduler.client.report [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.633862] env[62974]: INFO nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Took 36.11 seconds to build instance. [ 784.873098] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654300, 'name': CreateVM_Task, 'duration_secs': 0.357156} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.873987] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 784.874727] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.874893] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.875225] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 784.875477] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4025ba6a-f962-4a82-9354-acce51d8b4cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.880467] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 784.880467] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c768d6-5f9e-ee63-8922-b167419c39fa" [ 784.880467] env[62974]: _type = "Task" [ 784.880467] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.888953] env[62974]: DEBUG nova.network.neutron [-] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.889529] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c768d6-5f9e-ee63-8922-b167419c39fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.948876] env[62974]: DEBUG nova.compute.manager [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Received event network-changed-a4dcb530-120c-4113-98f3-faa4e893012d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 784.949776] env[62974]: DEBUG nova.compute.manager [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Refreshing instance network info cache due to event network-changed-a4dcb530-120c-4113-98f3-faa4e893012d. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 784.950631] env[62974]: DEBUG oslo_concurrency.lockutils [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] Acquiring lock "refresh_cache-d6ce3f68-a757-48bc-abeb-49c3aacdf465" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.950914] env[62974]: DEBUG oslo_concurrency.lockutils [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] Acquired lock "refresh_cache-d6ce3f68-a757-48bc-abeb-49c3aacdf465" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.951322] env[62974]: DEBUG nova.network.neutron [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Refreshing network info cache for port a4dcb530-120c-4113-98f3-faa4e893012d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.968209] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.137018] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.137018] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.139352] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 21.783s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.141092] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.674s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.391642] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c768d6-5f9e-ee63-8922-b167419c39fa, 'name': SearchDatastore_Task, 'duration_secs': 0.00947} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.392411] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.392411] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 785.392411] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.392611] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.392709] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
785.392989] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1221924d-5b3f-4937-9433-4dd8bdbcd024 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.395166] env[62974]: INFO nova.compute.manager [-] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Took 1.26 seconds to deallocate network for instance. [ 785.406123] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.406123] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 785.406123] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb476555-927a-4b5e-af0a-3b9aaaf4a423 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.415416] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 785.415416] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5257d108-726b-1166-7910-52bd33dc43ac" [ 785.415416] env[62974]: _type = "Task" [ 785.415416] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.425704] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5257d108-726b-1166-7910-52bd33dc43ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.644230] env[62974]: DEBUG nova.compute.utils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 785.644230] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 785.644230] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.645813] env[62974]: DEBUG nova.objects.instance [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lazy-loading 'migration_context' on Instance uuid 8621428e-cf42-47a4-82c8-a003c377b257 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.647477] env[62974]: DEBUG nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.902919] env[62974]: DEBUG nova.policy [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b18638d1ce6f4d2bb1f1e8117deba2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e728dfba54cd4779aad4879fb213a81b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 785.925929] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5257d108-726b-1166-7910-52bd33dc43ac, 'name': SearchDatastore_Task, 'duration_secs': 0.010393} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.926593] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bdc12be-42f5-4061-a154-828846b0c4ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.932231] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 785.932231] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d3a830-55a9-6387-d56f-0ba9b69154db" [ 785.932231] env[62974]: _type = "Task" [ 785.932231] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.942419] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d3a830-55a9-6387-d56f-0ba9b69154db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.973983] env[62974]: INFO nova.compute.manager [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Took 0.58 seconds to detach 1 volumes for instance. [ 786.042988] env[62974]: DEBUG nova.network.neutron [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Updated VIF entry in instance network info cache for port a4dcb530-120c-4113-98f3-faa4e893012d. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.043711] env[62974]: DEBUG nova.network.neutron [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Updating instance_info_cache with network_info: [{"id": "a4dcb530-120c-4113-98f3-faa4e893012d", "address": "fa:16:3e:af:d2:7b", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4dcb530-12", "ovs_interfaceid": "a4dcb530-120c-4113-98f3-faa4e893012d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.151223] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 786.187141] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.190228] env[62974]: INFO nova.compute.manager [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Rebuilding instance [ 786.258318] env[62974]: DEBUG nova.compute.manager [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 786.259220] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e3c82d-b835-48ca-a968-f7229662cfb2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.424458] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Successfully created port: cc08e424-4db8-4277-a8b3-2b77913828ac {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.443806] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d3a830-55a9-6387-d56f-0ba9b69154db, 'name': SearchDatastore_Task, 'duration_secs': 0.009672} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.444493] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.444493] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] d6ce3f68-a757-48bc-abeb-49c3aacdf465/d6ce3f68-a757-48bc-abeb-49c3aacdf465.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 786.445009] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2328511a-01e3-4af5-b318-3e9ffd4d627f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.453921] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 786.453921] env[62974]: value = "task-2654301" [ 786.453921] env[62974]: _type = "Task" [ 786.453921] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.475948] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654301, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.483382] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.547380] env[62974]: DEBUG oslo_concurrency.lockutils [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] Releasing lock "refresh_cache-d6ce3f68-a757-48bc-abeb-49c3aacdf465" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.547380] env[62974]: DEBUG nova.compute.manager [req-efd95c8f-aa09-4bc0-86c5-19f1f7374513 req-6397d8fd-4b07-4a78-829e-a8f5e28be7a9 service nova] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Received event network-vif-deleted-331712b7-2ae7-4199-a2b0-e7b880a332e8 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 786.784731] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74dde13f-3e75-4d52-9759-f7f483a1d2cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.790282] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7161eab5-24d9-4517-8f81-485a44fb4608 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.827917] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf846d7-6bdb-4503-bbba-061d42b25e95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.837121] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b5affe-072f-4041-bd96-b6fab17df6f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.859719] env[62974]: DEBUG nova.compute.provider_tree [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.965221] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654301, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.974142] env[62974]: DEBUG nova.compute.manager [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 787.163925] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.186113] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.186113] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.186113] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.186671] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.186671] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.186671] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
787.186671] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.186671] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.186804] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.186884] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.187060] env[62974]: DEBUG nova.virt.hardware [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.187923] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aac477a-e089-4f3f-a695-93fd7c55fe50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.196808] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abe3f26-608f-443f-af40-f901009249ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.276503] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 787.276839] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04be2b80-c924-4bc0-817d-c0d73abd5b57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.285815] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 787.285815] env[62974]: value = "task-2654302" [ 787.285815] env[62974]: _type = "Task" [ 787.285815] env[62974]: } to complete. 
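The nova.virt.hardware lines above enumerate the CPU topologies that fit the m1.nano flavor (1 vCPU, no flavor or image limits) and settle on sockets=1, cores=1, threads=1. A rough sketch of that enumeration, assuming the only constraint is that sockets x cores x threads must equal the vCPU count within the given maxima; the real Nova code applies more rules than this.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals the
        # vCPU count, capped by the limits; a rough analogue of the
        # "Build topologies ... Got 1 possible topologies" lines above.
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        topologies = []
        for sockets, cores in product(divisors, divisors):
            if vcpus % (sockets * cores):
                continue
            threads = vcpus // (sockets * cores)
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] for the 1-vCPU flavor above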
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.300347] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.362921] env[62974]: DEBUG nova.scheduler.client.report [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.465864] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654301, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.507125] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.797779] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.966401] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654301, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.474972} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.966748] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] d6ce3f68-a757-48bc-abeb-49c3aacdf465/d6ce3f68-a757-48bc-abeb-49c3aacdf465.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.966994] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.967466] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b48f9bf9-5faf-410b-8c4d-e0693eb7192d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.974848] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 787.974848] env[62974]: value = "task-2654303" [ 787.974848] env[62974]: _type = "Task" [ 787.974848] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.983588] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654303, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.298762] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.299275] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.299624] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.300076] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.300276] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.302059] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654302, 'name': PowerOffVM_Task, 'duration_secs': 0.839847} completed successfully. 
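The acquire/release pairs above come from oslo.concurrency's lockutils, which Nova uses for the per-instance lock and the "<uuid>-events" lock during terminate. A minimal sketch of both forms, with the lock bodies reduced to placeholders; the real work lives in nova.compute.manager.

    from oslo_concurrency import lockutils

    instance_uuid = "0c2642d5-85fe-4db5-9891-025c88ca8c7c"  # taken from the log above

    def do_terminate_instance():
        # Placeholder for the actual teardown work.
        print("terminating %s" % instance_uuid)

    # In-process lock, matching the "Acquiring lock ..." / '"released"' lines:
    with lockutils.lock(instance_uuid):
        do_terminate_instance()

    # The same mechanism as a decorator, the form Nova usually uses:
    @lockutils.synchronized(instance_uuid + "-events")
    def clear_events_for_instance():
        print("clearing pending external events")

    clear_events_for_instance()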
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.303050] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 788.303050] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.303635] env[62974]: INFO nova.compute.manager [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Terminating instance [ 788.305656] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d18b0b-ea29-4ff6-9faa-6383fd5fb667 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.317249] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 788.317249] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5131e166-19c7-4f01-bd49-150dbabbf022 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.377579] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.238s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.385883] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.228s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.385883] env[62974]: INFO nova.compute.claims [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.398137] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 788.400420] env[62974]: DEBUG nova.virt.vmwareapi.vmops 
[None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 788.400420] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleting the datastore file [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 788.400420] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02f56e9f-3dca-4dbe-8b6b-0eb0cef65f0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.409415] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 788.409415] env[62974]: value = "task-2654305" [ 788.409415] env[62974]: _type = "Task" [ 788.409415] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.420627] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.487441] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.181539} completed successfully. 
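The entries for instance b3827c67 above trace the VMware destroy path: power off the VM, unregister it from the vCenter inventory, then delete its folder from the datastore. The sketch below captures that ordering only; the three callables are hypothetical stand-ins for the PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task calls in the log, not the real vmops code.

    def destroy_instance(vm, datastore_path, power_off, unregister, delete_file):
        power_off(vm)                # "Powered off the VM"
        unregister(vm)               # "Unregistered the VM": removed from inventory,
                                     # files remain on the datastore
        delete_file(datastore_path)  # "Deleting the datastore file [datastore1] <uuid>"

    destroy_instance(
        vm="vm-ref",
        datastore_path="[datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211",
        power_off=lambda vm: print("power off", vm),
        unregister=lambda vm: print("unregister", vm),
        delete_file=lambda path: print("delete", path),
    )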
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.488080] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Successfully updated port: cc08e424-4db8-4277-a8b3-2b77913828ac {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.489572] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.490745] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25345e4-f8a5-41e6-bb5f-0da6ee0c4d81 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.521828] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] d6ce3f68-a757-48bc-abeb-49c3aacdf465/d6ce3f68-a757-48bc-abeb-49c3aacdf465.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.522714] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6aaedca0-d2ed-42a9-8d18-f0a2de2ac91a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.543536] env[62974]: DEBUG nova.compute.manager [req-372d8cfc-dba9-4d92-909d-53ac04b63f11 req-d7cf8650-b960-4703-a08e-a74bb6a03ce2 service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Received event network-vif-plugged-cc08e424-4db8-4277-a8b3-2b77913828ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 788.543899] env[62974]: DEBUG oslo_concurrency.lockutils [req-372d8cfc-dba9-4d92-909d-53ac04b63f11 req-d7cf8650-b960-4703-a08e-a74bb6a03ce2 service nova] Acquiring lock "1c7fabf7-ba82-4628-9016-b0f198add99a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.544270] env[62974]: DEBUG oslo_concurrency.lockutils [req-372d8cfc-dba9-4d92-909d-53ac04b63f11 req-d7cf8650-b960-4703-a08e-a74bb6a03ce2 service nova] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.544598] env[62974]: DEBUG oslo_concurrency.lockutils [req-372d8cfc-dba9-4d92-909d-53ac04b63f11 req-d7cf8650-b960-4703-a08e-a74bb6a03ce2 service nova] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.544889] env[62974]: DEBUG 
nova.compute.manager [req-372d8cfc-dba9-4d92-909d-53ac04b63f11 req-d7cf8650-b960-4703-a08e-a74bb6a03ce2 service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] No waiting events found dispatching network-vif-plugged-cc08e424-4db8-4277-a8b3-2b77913828ac {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 788.545222] env[62974]: WARNING nova.compute.manager [req-372d8cfc-dba9-4d92-909d-53ac04b63f11 req-d7cf8650-b960-4703-a08e-a74bb6a03ce2 service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Received unexpected event network-vif-plugged-cc08e424-4db8-4277-a8b3-2b77913828ac for instance with vm_state building and task_state spawning. [ 788.552698] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 788.552698] env[62974]: value = "task-2654306" [ 788.552698] env[62974]: _type = "Task" [ 788.552698] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.563859] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654306, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.812136] env[62974]: DEBUG nova.compute.manager [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 788.812307] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.814041] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68baec6a-87ba-40dd-a841-45442d271d89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.824667] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.824667] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bea0fdfa-c1b9-49c2-88c7-8cb060612e46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.831788] env[62974]: DEBUG oslo_vmware.api [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 788.831788] env[62974]: value = "task-2654307" [ 788.831788] env[62974]: _type = "Task" [ 788.831788] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.841602] env[62974]: DEBUG oslo_vmware.api [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654307, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.919878] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132706} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.920025] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.920230] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 788.920406] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.946171] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.946513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.992455] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "refresh_cache-1c7fabf7-ba82-4628-9016-b0f198add99a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.992455] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "refresh_cache-1c7fabf7-ba82-4628-9016-b0f198add99a" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.992455] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.063829] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654306, 'name': ReconfigVM_Task, 'duration_secs': 0.30283} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.064116] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Reconfigured VM instance instance-0000003c to attach disk [datastore1] d6ce3f68-a757-48bc-abeb-49c3aacdf465/d6ce3f68-a757-48bc-abeb-49c3aacdf465.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.064824] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37e21d31-932b-4df5-8f9c-f472c5ba8940 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.073139] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 789.073139] env[62974]: value = "task-2654308" [ 789.073139] env[62974]: _type = "Task" [ 789.073139] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.082482] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654308, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.342138] env[62974]: DEBUG oslo_vmware.api [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654307, 'name': PowerOffVM_Task, 'duration_secs': 0.218208} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.342423] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 789.342588] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 789.342843] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d93ef70-26d8-4537-882f-faa04732493b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.432935] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 789.433164] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 789.433341] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Deleting the datastore file [datastore1] 0c2642d5-85fe-4db5-9891-025c88ca8c7c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 789.433816] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c4d862e-e11a-42f2-b353-c47e0ed8e5a2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.441258] env[62974]: DEBUG oslo_vmware.api [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 789.441258] env[62974]: value = "task-2654310" [ 789.441258] env[62974]: _type = "Task" [ 789.441258] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.454166] env[62974]: DEBUG oslo_vmware.api [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.537597] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.583785] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654308, 'name': Rename_Task, 'duration_secs': 0.431743} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.584443] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 789.584443] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38419e8b-7ae2-473e-a172-1f0763272cd4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.593169] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 789.593169] env[62974]: value = "task-2654311" [ 789.593169] env[62974]: _type = "Task" [ 789.593169] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.602933] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654311, 'name': PowerOnVM_Task} progress is 0%. 
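Taken together, the tasks for instance d6ce3f68 in this excerpt form the spawn-from-cached-image sequence: copy the cached vmdk, extend the root disk, reconfigure the VM to attach it, rename the VM, power it on. A sketch of that ordering with hypothetical callables and abbreviated datastore paths; it is not the driver's actual spawn code.

    def spawn_from_cached_image(copy_disk, extend_disk, attach_disk, rename_vm, power_on,
                                cache_vmdk, instance_vmdk, root_size_kb):
        # Stand-ins for CopyVirtualDisk_Task, ExtendVirtualDisk_Task,
        # ReconfigVM_Task, Rename_Task and PowerOnVM_Task, in log order.
        copy_disk(cache_vmdk, instance_vmdk)      # copy out of devstack-image-cache_base
        extend_disk(instance_vmdk, root_size_kb)  # "Extending root virtual disk to 1048576"
        attach_disk(instance_vmdk)                # reconfigure the VM to use the new vmdk
        rename_vm()                               # give the VM its final display name
        power_on()                                # instance is running after this task

    spawn_from_cached_image(
        copy_disk=lambda src, dst: print("copy", src, "->", dst),
        extend_disk=lambda path, kb: print("extend", path, "to", kb, "KB"),
        attach_disk=lambda path: print("attach", path),
        rename_vm=lambda: print("rename"),
        power_on=lambda: print("power on"),
        cache_vmdk="[datastore1] devstack-image-cache_base/<image-id>.vmdk",   # abbreviated
        instance_vmdk="[datastore1] <instance-id>/<instance-id>.vmdk",         # abbreviated
        root_size_kb=1048576,                     # 1 GiB root disk, as in the log
    )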
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.818160] env[62974]: DEBUG nova.network.neutron [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Updating instance_info_cache with network_info: [{"id": "cc08e424-4db8-4277-a8b3-2b77913828ac", "address": "fa:16:3e:a1:bb:05", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc08e424-4d", "ovs_interfaceid": "cc08e424-4db8-4277-a8b3-2b77913828ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.923962] env[62974]: INFO nova.compute.manager [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Swapping old allocation on dict_keys(['bd3bd9ae-180c-41cf-831e-3dd3892efa18']) held by migration 51863248-1bda-40f6-8d3a-2b1dc321bf21 for instance [ 789.954414] env[62974]: DEBUG oslo_vmware.api [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31332} completed successfully. 
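The instance_info_cache entry above is a list of VIF dictionaries. A small sketch that pulls the fixed IPs out of such a structure; the dict literal below is trimmed down from the log entry rather than copied in full.

    network_info = [{
        "id": "cc08e424-4db8-4277-a8b3-2b77913828ac",
        "address": "fa:16:3e:a1:bb:05",
        "network": {
            "id": "fb545871-6566-4dc4-85cf-c289fbf21fce",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4},
                "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4}],
            }],
        },
        "type": "ovs",
        "devname": "tapcc08e424-4d",
    }]

    def fixed_ips(network_info):
        # Walk vif -> network -> subnets -> ips, the same nesting the cache entry uses.
        return [ip["address"]
                for vif in network_info
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"]

    print(fixed_ips(network_info))   # ['192.168.128.14']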
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.955620] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 789.955812] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 789.955984] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.956216] env[62974]: INFO nova.compute.manager [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 789.957079] env[62974]: DEBUG oslo.service.loopingcall [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.957237] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493a56a4-c747-468a-9935-8d5708d667c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.961793] env[62974]: DEBUG nova.compute.manager [-] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 789.961904] env[62974]: DEBUG nova.network.neutron [-] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.965531] env[62974]: DEBUG nova.scheduler.client.report [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Overwriting current allocation {'allocations': {'bd3bd9ae-180c-41cf-831e-3dd3892efa18': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 90}}, 'project_id': '7ae52d42e1b04ef890523d2b5834a5de', 'user_id': '6f563d2ef3444b77b3d0fa15328d78b5', 'consumer_generation': 1} on consumer 8621428e-cf42-47a4-82c8-a003c377b257 {{(pid=62974) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 789.970696] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 789.970928] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.971096] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 789.971279] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.971430] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 
tempest-ServersAdminTestJSON-699970003-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 789.971622] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 789.972057] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 789.972356] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 789.972566] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 789.972738] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 789.972911] env[62974]: DEBUG nova.virt.hardware [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 789.974330] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31637ef2-447d-4b92-96ac-ae6a47ad5419 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.980186] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1cca50-632a-4c3d-b63d-20b97820b357 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.992302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8b9e5e-d636-4644-a0b4-af5e5af5134d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.030930] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d2c72e-ceef-4a68-953c-03fbd73592c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.047041] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] 
[instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:2d:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.055719] env[62974]: DEBUG oslo.service.loopingcall [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.056935] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.057223] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c1647a5-7c20-48c1-948b-6cab4bb52b7f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.080061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.080061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquired lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.080061] env[62974]: DEBUG nova.network.neutron [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 790.089033] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0008a81-9e5e-4140-8ae1-25e21fcf2750 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.096727] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.096727] env[62974]: value = "task-2654312" [ 790.096727] env[62974]: _type = "Task" [ 790.096727] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.120657] env[62974]: DEBUG nova.compute.provider_tree [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.128547] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654311, 'name': PowerOnVM_Task, 'duration_secs': 0.514457} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.128547] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 790.128787] env[62974]: INFO nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Took 7.91 seconds to spawn the instance on the hypervisor. [ 790.129048] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 790.137032] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527d98d4-f5b9-43a2-a446-79b5ec9a24ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.139073] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654312, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.320735] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "refresh_cache-1c7fabf7-ba82-4628-9016-b0f198add99a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.321091] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Instance network_info: |[{"id": "cc08e424-4db8-4277-a8b3-2b77913828ac", "address": "fa:16:3e:a1:bb:05", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc08e424-4d", "ovs_interfaceid": "cc08e424-4db8-4277-a8b3-2b77913828ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.321530] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:bb:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '535b175f-71d3-4226-81ab-ca253f27fedd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc08e424-4db8-4277-a8b3-2b77913828ac', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.329921] env[62974]: DEBUG oslo.service.loopingcall [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
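The "Waiting for function ... to return" lines come from oslo.service's looping-call helper, which keeps re-invoking a function until it raises LoopingCallDone. A minimal sketch of that pattern, assuming FixedIntervalLoopingCall; the retry budget and return value here are invented for the example, and the specific helper wrapped in the log may differ.

    from oslo_service import loopingcall

    attempts = {"n": 0}

    def _try_once():
        # Stand-in for the wrapped function named in the log (network deallocation,
        # vm_util.create_vm, ...). Raising LoopingCallDone stops the loop.
        attempts["n"] += 1
        if attempts["n"] >= 3:                      # pretend it succeeds on the 3rd try
            raise loopingcall.LoopingCallDone(True)

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    result = timer.start(interval=0.1).wait()       # blocks until LoopingCallDone
    print("done after", attempts["n"], "attempts:", result)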
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.330545] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.330799] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4eb5222-8bca-48c9-88f5-c419a70f6a4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.352993] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.352993] env[62974]: value = "task-2654313" [ 790.352993] env[62974]: _type = "Task" [ 790.352993] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.361788] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654313, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.573671] env[62974]: DEBUG nova.compute.manager [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Received event network-changed-cc08e424-4db8-4277-a8b3-2b77913828ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 790.573923] env[62974]: DEBUG nova.compute.manager [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Refreshing instance network info cache due to event network-changed-cc08e424-4db8-4277-a8b3-2b77913828ac. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 790.574183] env[62974]: DEBUG oslo_concurrency.lockutils [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] Acquiring lock "refresh_cache-1c7fabf7-ba82-4628-9016-b0f198add99a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.574327] env[62974]: DEBUG oslo_concurrency.lockutils [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] Acquired lock "refresh_cache-1c7fabf7-ba82-4628-9016-b0f198add99a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.574488] env[62974]: DEBUG nova.network.neutron [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Refreshing network info cache for port cc08e424-4db8-4277-a8b3-2b77913828ac {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.612487] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654312, 'name': CreateVM_Task, 'duration_secs': 0.482439} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.612743] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 790.613512] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.613673] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.614012] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 790.614294] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c18e905-b802-41a1-a4fd-9282be4deb14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.622964] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 790.622964] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525fc29e-4025-49f2-2a1d-e6b574536c68" [ 790.622964] env[62974]: _type = "Task" [ 790.622964] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.629742] env[62974]: DEBUG nova.scheduler.client.report [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.637709] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525fc29e-4025-49f2-2a1d-e6b574536c68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.655916] env[62974]: INFO nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Took 40.18 seconds to build instance. [ 790.868902] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654313, 'name': CreateVM_Task, 'duration_secs': 0.48979} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.869081] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 790.869734] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.933276] env[62974]: DEBUG nova.network.neutron [-] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.992585] env[62974]: DEBUG nova.network.neutron [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance_info_cache with network_info: [{"id": "3b60d221-2cab-4e30-8892-d139b511ccc1", "address": "fa:16:3e:80:cf:bd", "network": {"id": "877aebb2-299c-43da-98b0-58f4ced77a69", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "37ee788d98c44826be80135caef4b658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b60d221-2c", "ovs_interfaceid": "3b60d221-2cab-4e30-8892-d139b511ccc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.138466] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.754s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.139364] env[62974]: DEBUG nova.compute.manager [None 
req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 791.144742] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525fc29e-4025-49f2-2a1d-e6b574536c68, 'name': SearchDatastore_Task, 'duration_secs': 0.011319} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.146577] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.419s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.147144] env[62974]: DEBUG nova.objects.instance [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lazy-loading 'resources' on Instance uuid cf6e4f04-f5f4-46cb-884b-8014af903a10 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 791.151038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.151038] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.151038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.151038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.151253] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.151253] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.151465] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 791.151874] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93ebc10a-a7a0-47b4-9f6e-5487a2d1b803 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.155834] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aa9eb05-6c1e-4031-a5df-b991c581cd5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.161476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.654s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.170526] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 791.170526] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5226a24b-b9b4-1487-e9a5-aa67d88abb45" [ 791.170526] env[62974]: _type = "Task" [ 791.170526] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.179655] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.180225] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.187469] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bf9d150-ecf7-4341-a9c2-8549b995838c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.191320] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5226a24b-b9b4-1487-e9a5-aa67d88abb45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.198836] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 791.198836] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5233e6ec-cff3-a29b-66cc-485196144958" [ 791.198836] env[62974]: _type = "Task" [ 791.198836] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.209850] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5233e6ec-cff3-a29b-66cc-485196144958, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.415580] env[62974]: DEBUG nova.network.neutron [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Updated VIF entry in instance network info cache for port cc08e424-4db8-4277-a8b3-2b77913828ac. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 791.415832] env[62974]: DEBUG nova.network.neutron [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Updating instance_info_cache with network_info: [{"id": "cc08e424-4db8-4277-a8b3-2b77913828ac", "address": "fa:16:3e:a1:bb:05", "network": {"id": "fb545871-6566-4dc4-85cf-c289fbf21fce", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-662857992-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e728dfba54cd4779aad4879fb213a81b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc08e424-4d", "ovs_interfaceid": "cc08e424-4db8-4277-a8b3-2b77913828ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.437958] env[62974]: INFO nova.compute.manager [-] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Took 1.48 seconds to deallocate network for instance. [ 791.495789] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Releasing lock "refresh_cache-8621428e-cf42-47a4-82c8-a003c377b257" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.496170] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 791.496556] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f6ea235-ccdd-429d-b0e1-32ff167f4407 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.505819] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 791.505819] env[62974]: value = "task-2654314" [ 791.505819] env[62974]: _type = "Task" [ 791.505819] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.515010] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654314, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.648302] env[62974]: DEBUG nova.compute.utils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.649789] env[62974]: DEBUG nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 791.649963] env[62974]: DEBUG nova.network.neutron [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.667709] env[62974]: DEBUG nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 791.688900] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5226a24b-b9b4-1487-e9a5-aa67d88abb45, 'name': SearchDatastore_Task, 'duration_secs': 0.029831} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.689436] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.689821] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.690190] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.696940] env[62974]: DEBUG nova.policy [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62ba908cd236471e83c63adcffa55199', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5fcbb1d7aa1440cb5c5fbe27662a39e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 791.710175] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5233e6ec-cff3-a29b-66cc-485196144958, 'name': SearchDatastore_Task, 'duration_secs': 0.041833} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.713799] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b648b916-5757-4d1a-a65b-ae0b207283e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.721444] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 791.721444] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52319eb2-d58a-4647-90a2-cf7322852588" [ 791.721444] env[62974]: _type = "Task" [ 791.721444] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.730388] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52319eb2-d58a-4647-90a2-cf7322852588, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.919131] env[62974]: DEBUG oslo_concurrency.lockutils [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] Releasing lock "refresh_cache-1c7fabf7-ba82-4628-9016-b0f198add99a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.919419] env[62974]: DEBUG nova.compute.manager [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Received event network-vif-deleted-5ab4b6f6-7ea8-4151-875b-adaed16c006b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 791.919628] env[62974]: INFO nova.compute.manager [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Neutron deleted interface 5ab4b6f6-7ea8-4151-875b-adaed16c006b; detaching it from the instance and deleting it from the info cache [ 791.919807] env[62974]: DEBUG nova.network.neutron [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.946755] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.019983] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654314, 'name': PowerOffVM_Task, 'duration_secs': 0.248238} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.020488] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 792.021364] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:52:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c356ba03-298c-489b-984a-f2eae32bbcc6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1135043868',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.021643] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.021721] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.022091] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.022244] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.022656] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.022958] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.023147] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 
tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.023384] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.023546] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.023713] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.031881] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c451ba0-2fe1-44d5-810e-dcfc274f1da5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.050235] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 792.050235] env[62974]: value = "task-2654315" [ 792.050235] env[62974]: _type = "Task" [ 792.050235] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.062953] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654315, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.156099] env[62974]: DEBUG nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 792.187910] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.193834] env[62974]: DEBUG nova.network.neutron [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Successfully created port: abd131b8-9d9e-4230-b1d9-19c7a25bb78a {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 792.228660] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3d792a-88d2-4713-8e06-9f6f0b0dafb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.240600] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52319eb2-d58a-4647-90a2-cf7322852588, 'name': SearchDatastore_Task, 'duration_secs': 0.026617} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.243477] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.243860] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.244344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.244577] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.245117] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-555d3d03-91ae-4b27-8b29-bf42233c056f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.247891] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cc80c8-2064-41eb-82a4-511e4aa24eac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.252771] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0674fe5d-3910-4b63-84d2-a465d51a4f92 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.288370] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99652dc-f30c-438e-8ebd-c646d3213d82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.291204] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 792.291204] env[62974]: value = "task-2654316" [ 792.291204] env[62974]: _type = "Task" [ 792.291204] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.291345] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.291503] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.292696] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29b48c2a-7dfa-4795-9453-dae5e7e383f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.303199] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c75e74-4254-4013-96b4-068f0049acc1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.310095] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 792.310095] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c1fed-bfb5-5fd6-8074-6c0be75bde23" [ 792.310095] env[62974]: _type = "Task" [ 792.310095] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.310320] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654316, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.322019] env[62974]: DEBUG nova.compute.provider_tree [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.330025] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522c1fed-bfb5-5fd6-8074-6c0be75bde23, 'name': SearchDatastore_Task, 'duration_secs': 0.011926} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.335016] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4665add-92e0-4af8-9e36-dcb859a90a88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.338464] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 792.338464] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526a3164-8d62-011f-fe5c-4ffcfbb1367d" [ 792.338464] env[62974]: _type = "Task" [ 792.338464] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.350328] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526a3164-8d62-011f-fe5c-4ffcfbb1367d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.423466] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1348acb1-009a-41e4-9714-21be595cfa3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.434069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c484e5-1e23-48c9-9a7d-d55f8c5fdd82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.475598] env[62974]: DEBUG nova.compute.manager [req-caba0c65-95a5-4c83-bcd7-16ba7a81d92a req-f42fcad8-dff9-40aa-a366-9f8af025210e service nova] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Detach interface failed, port_id=5ab4b6f6-7ea8-4151-875b-adaed16c006b, reason: Instance 0c2642d5-85fe-4db5-9891-025c88ca8c7c could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 792.561814] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654315, 'name': ReconfigVM_Task, 'duration_secs': 0.307815} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.562691] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff6abe1-30b4-4631-9cc7-8b8febf217fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.591274] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:52:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c356ba03-298c-489b-984a-f2eae32bbcc6',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1135043868',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.591541] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.591705] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.591888] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.592046] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.592243] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.592484] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.592632] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.592791] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.592951] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.593135] env[62974]: DEBUG nova.virt.hardware [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.594663] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25516fa3-e03d-44ff-8d2e-d7e8815a1aa3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.600593] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 792.600593] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b66734-0293-2881-8331-1e1089b92a5d" [ 792.600593] env[62974]: _type = "Task" [ 792.600593] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.609543] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b66734-0293-2881-8331-1e1089b92a5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.804765] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654316, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.825451] env[62974]: DEBUG nova.scheduler.client.report [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.851360] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526a3164-8d62-011f-fe5c-4ffcfbb1367d, 'name': SearchDatastore_Task, 'duration_secs': 0.010974} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.851606] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.852123] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1c7fabf7-ba82-4628-9016-b0f198add99a/1c7fabf7-ba82-4628-9016-b0f198add99a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.852441] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb085858-d99a-4f87-a1dd-48ade6cee5a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.862200] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 792.862200] env[62974]: value = "task-2654317" [ 792.862200] env[62974]: _type = "Task" [ 792.862200] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.873256] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.111828] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b66734-0293-2881-8331-1e1089b92a5d, 'name': SearchDatastore_Task, 'duration_secs': 0.010025} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.117289] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfiguring VM instance instance-00000029 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 793.117926] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-938305b9-3ee1-4bb5-9606-1556cb39e35b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.137925] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 793.137925] env[62974]: value = "task-2654318" [ 793.137925] env[62974]: _type = "Task" [ 793.137925] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.146585] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654318, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.164074] env[62974]: DEBUG nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 793.197248] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 793.197503] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.197660] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 793.197837] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.197982] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 793.198143] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 793.198428] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 793.198623] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 793.198803] 
env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 793.199094] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 793.199150] env[62974]: DEBUG nova.virt.hardware [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 793.200060] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da911edb-cc28-4a45-9e68-1f71e2f8f979 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.208862] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27feeae4-2e4b-4f9f-8e25-51a802f1a8c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.303206] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654316, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.331528] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.185s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.333986] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.799s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.335589] env[62974]: INFO nova.compute.claims [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.373485] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.438216] env[62974]: INFO nova.scheduler.client.report [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Deleted allocations for instance cf6e4f04-f5f4-46cb-884b-8014af903a10 [ 793.648108] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.804302] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654316, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.873941] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654317, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.947242] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18c1f479-8ee3-4007-9e39-00656b9fc5db tempest-InstanceActionsNegativeTestJSON-1384716503 tempest-InstanceActionsNegativeTestJSON-1384716503-project-member] Lock "cf6e4f04-f5f4-46cb-884b-8014af903a10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.685s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.154658] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654318, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.297283] env[62974]: DEBUG nova.network.neutron [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Successfully updated port: abd131b8-9d9e-4230-b1d9-19c7a25bb78a {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 794.301463] env[62974]: DEBUG nova.compute.manager [req-f3080d22-998f-4f25-8f09-0ca7fc4125c5 req-b1a9dd41-b020-4190-abc5-bf0a2d7d2f96 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Received event network-vif-plugged-abd131b8-9d9e-4230-b1d9-19c7a25bb78a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 794.301463] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3080d22-998f-4f25-8f09-0ca7fc4125c5 req-b1a9dd41-b020-4190-abc5-bf0a2d7d2f96 service nova] Acquiring lock "b31dea29-79d6-4117-bdb5-2d38fb660a53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.301463] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3080d22-998f-4f25-8f09-0ca7fc4125c5 req-b1a9dd41-b020-4190-abc5-bf0a2d7d2f96 service nova] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.301818] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3080d22-998f-4f25-8f09-0ca7fc4125c5 req-b1a9dd41-b020-4190-abc5-bf0a2d7d2f96 service nova] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.301818] env[62974]: DEBUG nova.compute.manager [req-f3080d22-998f-4f25-8f09-0ca7fc4125c5 req-b1a9dd41-b020-4190-abc5-bf0a2d7d2f96 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] No waiting events found dispatching network-vif-plugged-abd131b8-9d9e-4230-b1d9-19c7a25bb78a {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 794.301818] env[62974]: WARNING nova.compute.manager [req-f3080d22-998f-4f25-8f09-0ca7fc4125c5 req-b1a9dd41-b020-4190-abc5-bf0a2d7d2f96 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Received unexpected event network-vif-plugged-abd131b8-9d9e-4230-b1d9-19c7a25bb78a for instance with vm_state building and task_state spawning. [ 794.314215] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654316, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.705403} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.315028] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 794.315294] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 794.315574] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5f89b80-c189-4d12-acec-fd59408cb10b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.324580] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 794.324580] env[62974]: value = "task-2654319" [ 794.324580] env[62974]: _type = "Task" [ 794.324580] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.333702] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654319, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.377328] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654317, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.653244] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654318, 'name': ReconfigVM_Task, 'duration_secs': 1.266794} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.653534] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfigured VM instance instance-00000029 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 794.654928] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a616cb7-a49d-4cc8-940d-2792fe1b1e77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.681779] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.684674] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2beba9ff-a37a-4d3d-ba56-ebb431fe00f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.705016] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 794.705016] env[62974]: value = "task-2654320" [ 794.705016] env[62974]: _type = "Task" [ 794.705016] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.716732] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654320, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.802908] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.803082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.804936] env[62974]: DEBUG nova.network.neutron [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.838196] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165217} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.838196] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.839243] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09381522-487b-4c09-b50c-1cacb8636630 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.863835] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.868020] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2beb877c-a6ea-46fa-8a23-60afc908af89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.893153] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654317, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.851978} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.896998] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1c7fabf7-ba82-4628-9016-b0f198add99a/1c7fabf7-ba82-4628-9016-b0f198add99a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 794.897274] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 794.897602] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 794.897602] env[62974]: value = "task-2654321" [ 794.897602] env[62974]: _type = "Task" [ 794.897602] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.897996] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e84dc179-ca87-450e-b29c-75efd6472838 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.908892] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654321, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.912791] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 794.912791] env[62974]: value = "task-2654322" [ 794.912791] env[62974]: _type = "Task" [ 794.912791] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.925258] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe23fa1-fb5f-4ff2-86a9-509a88f6836b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.934875] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31f42dd-b6b0-4b36-a289-25f87d936f90 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.972208] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcc21e4-0f91-4095-9810-60d30bdf1ec4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.981011] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7b32e6-c82b-4d9c-88a0-d1589ac4222a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.996301] env[62974]: DEBUG nova.compute.provider_tree [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.217695] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654320, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.335842] env[62974]: DEBUG nova.network.neutron [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.412350] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654321, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.421790] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086459} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.422057] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 795.422821] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38636113-986d-4426-bcc3-e3c4344d79af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.445277] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 1c7fabf7-ba82-4628-9016-b0f198add99a/1c7fabf7-ba82-4628-9016-b0f198add99a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 795.445575] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2533c98-1720-4dca-935a-618effea02b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.465866] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 795.465866] env[62974]: value = "task-2654323" [ 795.465866] env[62974]: _type = "Task" [ 795.465866] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.478023] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654323, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.499283] env[62974]: DEBUG nova.scheduler.client.report [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.585263] env[62974]: DEBUG nova.network.neutron [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Updating instance_info_cache with network_info: [{"id": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "address": "fa:16:3e:7f:18:40", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabd131b8-9d", "ovs_interfaceid": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.718106] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654320, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.911450] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654321, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.976850] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654323, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.004032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.004883] env[62974]: DEBUG nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 796.007523] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.530s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.009765] env[62974]: INFO nova.compute.claims [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.090048] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.090048] env[62974]: DEBUG nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Instance network_info: |[{"id": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "address": "fa:16:3e:7f:18:40", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabd131b8-9d", "ovs_interfaceid": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 796.090225] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:18:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abd131b8-9d9e-4230-b1d9-19c7a25bb78a', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 796.098741] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Creating folder: Project (f5fcbb1d7aa1440cb5c5fbe27662a39e). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 796.099034] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b4f8d05-5885-493c-a7bb-7de0e08f2c64 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.111451] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Created folder: Project (f5fcbb1d7aa1440cb5c5fbe27662a39e) in parent group-v535199. [ 796.111650] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Creating folder: Instances. Parent ref: group-v535369. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 796.111984] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-756496bb-d3e0-411c-b795-80373eb0fc8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.124554] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Created folder: Instances in parent group-v535369. [ 796.124845] env[62974]: DEBUG oslo.service.loopingcall [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.125013] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 796.125222] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-351260f2-fa83-48a2-a148-b7ec4265bcc1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.145824] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.145824] env[62974]: value = "task-2654326" [ 796.145824] env[62974]: _type = "Task" [ 796.145824] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.154191] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654326, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.222734] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654320, 'name': ReconfigVM_Task, 'duration_secs': 1.03956} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.223080] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257/8621428e-cf42-47a4-82c8-a003c377b257.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.223965] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfedeef-c209-43da-9041-ed015f28d377 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.245396] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a240b6ea-dfac-4c99-8122-61be23b090c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.264647] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832b07fd-00d5-44db-b45c-b26472690396 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.286707] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3197fe7-2960-4d2a-a7e3-5b90ce1a0622 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.294922] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.295207] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73ed6afb-3fe4-45c6-99ce-6fc126ee3b3c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.302748] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 796.302748] env[62974]: value = "task-2654327" [ 796.302748] env[62974]: _type = "Task" [ 796.302748] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.311241] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654327, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.397205] env[62974]: DEBUG nova.compute.manager [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Received event network-changed-abd131b8-9d9e-4230-b1d9-19c7a25bb78a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 796.398174] env[62974]: DEBUG nova.compute.manager [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Refreshing instance network info cache due to event network-changed-abd131b8-9d9e-4230-b1d9-19c7a25bb78a. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 796.398174] env[62974]: DEBUG oslo_concurrency.lockutils [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] Acquiring lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.398174] env[62974]: DEBUG oslo_concurrency.lockutils [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] Acquired lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.398174] env[62974]: DEBUG nova.network.neutron [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Refreshing network info cache for port abd131b8-9d9e-4230-b1d9-19c7a25bb78a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 796.412419] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654321, 'name': ReconfigVM_Task, 'duration_secs': 1.021115} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.412669] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Reconfigured VM instance instance-00000013 to attach disk [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.413349] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-774e5eab-1502-476e-873d-8ad256cabe10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.422319] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 796.422319] env[62974]: value = "task-2654328" [ 796.422319] env[62974]: _type = "Task" [ 796.422319] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.434246] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654328, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.478705] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654323, 'name': ReconfigVM_Task, 'duration_secs': 0.545114} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.479372] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 1c7fabf7-ba82-4628-9016-b0f198add99a/1c7fabf7-ba82-4628-9016-b0f198add99a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.479990] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2b1b1aa-c974-4009-bd6c-5d8a85e0eff6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.489176] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 796.489176] env[62974]: value = "task-2654329" [ 796.489176] env[62974]: _type = "Task" [ 796.489176] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.498652] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654329, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.516822] env[62974]: DEBUG nova.compute.utils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 796.518542] env[62974]: DEBUG nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 796.518923] env[62974]: DEBUG nova.network.neutron [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.596445] env[62974]: DEBUG nova.policy [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62ba908cd236471e83c63adcffa55199', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5fcbb1d7aa1440cb5c5fbe27662a39e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 796.659757] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654326, 'name': CreateVM_Task, 'duration_secs': 0.38614} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.660346] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 796.661083] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.661259] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.661579] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 796.661908] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fccdc3af-251b-4df6-95e8-7f8db8939963 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.668621] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 796.668621] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526170cd-bf99-1beb-cbc0-59ffe775b8cc" [ 796.668621] env[62974]: _type = "Task" [ 796.668621] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.677455] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526170cd-bf99-1beb-cbc0-59ffe775b8cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.814988] env[62974]: DEBUG oslo_vmware.api [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654327, 'name': PowerOnVM_Task, 'duration_secs': 0.459596} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.816323] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.935609] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654328, 'name': Rename_Task, 'duration_secs': 0.178648} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.935912] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.936180] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38466c30-c183-4825-b043-06cffe69e207 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.944183] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 796.944183] env[62974]: value = "task-2654330" [ 796.944183] env[62974]: _type = "Task" [ 796.944183] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.954945] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.999471] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654329, 'name': Rename_Task, 'duration_secs': 0.172531} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.000030] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 797.000383] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b804760-6438-4611-add0-99fa90a36a03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.008685] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 797.008685] env[62974]: value = "task-2654331" [ 797.008685] env[62974]: _type = "Task" [ 797.008685] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.023760] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.024661] env[62974]: DEBUG nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 797.106228] env[62974]: DEBUG nova.network.neutron [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Successfully created port: 9f1050c5-0ced-4039-b2a7-cea11ae0f227 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.193026] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526170cd-bf99-1beb-cbc0-59ffe775b8cc, 'name': SearchDatastore_Task, 'duration_secs': 0.020586} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.193026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.193026] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.193026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.193332] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.193332] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.193332] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14c8210e-dd86-459d-b6f5-78535076159f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.204604] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.204955] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 797.205955] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23cd78d5-9375-4050-81f2-734bf3648375 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.214137] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 797.214137] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522e5559-c5dd-d0f0-14f1-849b6dd655e1" [ 797.214137] env[62974]: _type = "Task" [ 797.214137] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.230611] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522e5559-c5dd-d0f0-14f1-849b6dd655e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.303863] env[62974]: DEBUG nova.network.neutron [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Updated VIF entry in instance network info cache for port abd131b8-9d9e-4230-b1d9-19c7a25bb78a. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 797.303863] env[62974]: DEBUG nova.network.neutron [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Updating instance_info_cache with network_info: [{"id": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "address": "fa:16:3e:7f:18:40", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabd131b8-9d", "ovs_interfaceid": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.304695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0" by 
"nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.305044] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.305151] env[62974]: INFO nova.compute.manager [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Shelving [ 797.456191] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654330, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.523896] env[62974]: DEBUG oslo_vmware.api [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654331, 'name': PowerOnVM_Task, 'duration_secs': 0.508906} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.524157] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.524290] env[62974]: INFO nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Took 10.36 seconds to spawn the instance on the hypervisor. 
[ 797.524469] env[62974]: DEBUG nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.529255] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89c6178-fc38-4c36-a53c-9f638bfa0cdf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.651929] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448736c5-1b43-4c0a-b98e-f36e7d403cb0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.661217] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a35c85c-7ab4-45a7-bcba-91e1d742380e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.705976] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41b6143-7e0c-40e1-aadf-4c59fcc9b412 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.718923] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1fdad8-2692-48c6-ba40-98e3359a63d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.731072] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522e5559-c5dd-d0f0-14f1-849b6dd655e1, 'name': SearchDatastore_Task, 'duration_secs': 0.015324} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.741046] env[62974]: DEBUG nova.compute.provider_tree [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.741046] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b82353fc-d9ab-4231-b06a-ed1330dde969 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.748376] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 797.748376] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bf047d-49ab-a805-4436-5fed901f1ec4" [ 797.748376] env[62974]: _type = "Task" [ 797.748376] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.758492] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bf047d-49ab-a805-4436-5fed901f1ec4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.807954] env[62974]: DEBUG oslo_concurrency.lockutils [req-8762d2a6-1860-47ba-b605-e709fae39975 req-5e4ca3c0-92e6-442e-a841-649a487d0fe2 service nova] Releasing lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.832029] env[62974]: INFO nova.compute.manager [None req-4cbbfaef-509c-4de0-a6f2-3bc0cc886a84 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance to original state: 'active' [ 797.959735] env[62974]: DEBUG oslo_vmware.api [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654330, 'name': PowerOnVM_Task, 'duration_secs': 0.768267} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.959897] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.960393] env[62974]: DEBUG nova.compute.manager [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.962022] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92626b23-47e0-4b3e-93d4-61155581fa04 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.042347] env[62974]: DEBUG nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 798.063490] env[62974]: INFO nova.compute.manager [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Took 42.51 seconds to build instance. 
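The PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: a vSphere task is started, then wait_for_task polls it, logging "progress is N%" until it reports success. A minimal sketch of that pattern, assuming 'session' is an already-created oslo_vmware.api.VMwareAPISession and 'vm_ref' is the VM's managed object reference (this is not the actual nova.virt.vmwareapi.vm_util code):

    def power_on(session, vm_ref):
        # Start the asynchronous vSphere task via the session's SOAP proxy...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ...then block while oslo.vmware polls it; the returned task info
        # carries the final state and duration seen in the log lines above.
        return session.wait_for_task(task)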
[ 798.083853] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 798.084055] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.084365] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 798.084402] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.084529] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 798.084674] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 798.084881] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 798.086042] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 798.086042] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 
tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 798.086246] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 798.086339] env[62974]: DEBUG nova.virt.hardware [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 798.088147] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3001242-a8b8-44db-a8aa-dee8905247f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.099125] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab00602-cf52-4d69-94dc-1c829df9b7db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.245035] env[62974]: DEBUG nova.scheduler.client.report [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.261499] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bf047d-49ab-a805-4436-5fed901f1ec4, 'name': SearchDatastore_Task, 'duration_secs': 0.032238} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.261575] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.262756] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b31dea29-79d6-4117-bdb5-2d38fb660a53/b31dea29-79d6-4117-bdb5-2d38fb660a53.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 798.262756] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8a1f439-771c-46b4-9958-24172f6474d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.274322] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 798.274322] env[62974]: value = "task-2654332" [ 798.274322] env[62974]: _type = "Task" [ 798.274322] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.286305] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.318321] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 798.318909] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f3783b9-ad45-40c4-b21e-bece22923341 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.327689] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 798.327689] env[62974]: value = "task-2654333" [ 798.327689] env[62974]: _type = "Task" [ 798.327689] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.349902] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654333, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.483048] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.565260] env[62974]: DEBUG oslo_concurrency.lockutils [None req-648bab4a-554e-4c15-a7d1-6e806200dc9a tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.025s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.756018] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.759351] env[62974]: DEBUG nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 798.769033] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.944s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.772872] env[62974]: INFO nova.compute.claims [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.795606] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654332, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.842209] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654333, 'name': PowerOffVM_Task, 'duration_secs': 0.342026} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.842520] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 798.843433] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ae3b0f-ff99-44b3-8df9-764f554c5708 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.865141] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "5d6a072e-dba7-461d-9d41-8ca003b31102" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.865141] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.865141] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "5d6a072e-dba7-461d-9d41-8ca003b31102-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.865141] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.865305] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.870013] env[62974]: INFO nova.compute.manager [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Terminating instance [ 798.870790] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a3b1a1-f1fb-4d5c-af33-1ea7f9befbc3 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.950598] env[62974]: DEBUG nova.network.neutron [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Successfully updated port: 9f1050c5-0ced-4039-b2a7-cea11ae0f227 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.008263] env[62974]: DEBUG nova.compute.manager [req-e5008164-b7de-433d-8bd1-f11b6f6e14e7 req-01729a92-fef9-4f4c-baa3-92fde24b08d6 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Received event network-vif-plugged-9f1050c5-0ced-4039-b2a7-cea11ae0f227 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 799.008460] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5008164-b7de-433d-8bd1-f11b6f6e14e7 req-01729a92-fef9-4f4c-baa3-92fde24b08d6 service nova] Acquiring lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.008705] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5008164-b7de-433d-8bd1-f11b6f6e14e7 req-01729a92-fef9-4f4c-baa3-92fde24b08d6 service nova] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.008894] env[62974]: DEBUG oslo_concurrency.lockutils [req-e5008164-b7de-433d-8bd1-f11b6f6e14e7 req-01729a92-fef9-4f4c-baa3-92fde24b08d6 service nova] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.009116] env[62974]: DEBUG nova.compute.manager [req-e5008164-b7de-433d-8bd1-f11b6f6e14e7 req-01729a92-fef9-4f4c-baa3-92fde24b08d6 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] No waiting events found dispatching network-vif-plugged-9f1050c5-0ced-4039-b2a7-cea11ae0f227 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 799.009308] env[62974]: WARNING nova.compute.manager [req-e5008164-b7de-433d-8bd1-f11b6f6e14e7 req-01729a92-fef9-4f4c-baa3-92fde24b08d6 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Received unexpected event network-vif-plugged-9f1050c5-0ced-4039-b2a7-cea11ae0f227 for instance with vm_state building and task_state spawning. [ 799.066935] env[62974]: DEBUG nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 799.140091] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "8621428e-cf42-47a4-82c8-a003c377b257" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.140279] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.140682] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "8621428e-cf42-47a4-82c8-a003c377b257-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.140682] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.140828] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.150637] env[62974]: INFO nova.compute.manager [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Terminating instance [ 799.269812] env[62974]: DEBUG nova.compute.utils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.271300] env[62974]: DEBUG nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 799.271915] env[62974]: DEBUG nova.network.neutron [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 799.290908] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654332, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.355820] env[62974]: DEBUG nova.policy [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62ba908cd236471e83c63adcffa55199', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5fcbb1d7aa1440cb5c5fbe27662a39e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 799.379200] env[62974]: DEBUG nova.compute.manager [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 799.379200] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.379200] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af48b24-8cc4-4601-874f-8bbeb4ebafb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.384536] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 799.385358] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5bf5cd5d-fe11-407f-9c35-6f306d3d2fd7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.391765] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.392570] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51cbcbc8-c72f-4274-bb7d-934408fdfc5f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.399199] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 799.399199] env[62974]: value = "task-2654334" [ 799.399199] env[62974]: _type = "Task" [ 799.399199] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.404992] env[62974]: DEBUG oslo_vmware.api [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 799.404992] env[62974]: value = "task-2654335" [ 799.404992] env[62974]: _type = "Task" [ 799.404992] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.414101] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654334, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.421730] env[62974]: DEBUG oslo_vmware.api [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654335, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.455533] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "refresh_cache-eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.455533] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "refresh_cache-eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.455533] env[62974]: DEBUG nova.network.neutron [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.594154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.658922] env[62974]: DEBUG nova.compute.manager [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 799.659148] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.660148] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f82366c-aba8-4326-920d-8d02b9fb5e29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.669963] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.670400] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d34b2ddc-d60d-499a-b8d3-b4b121ea77da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.680274] env[62974]: DEBUG oslo_vmware.api [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 799.680274] env[62974]: value = "task-2654336" [ 799.680274] env[62974]: _type = "Task" [ 799.680274] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.691720] env[62974]: DEBUG oslo_vmware.api [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654336, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.775653] env[62974]: DEBUG nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 799.793355] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654332, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.859718] env[62974]: INFO nova.compute.manager [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Rebuilding instance [ 799.911930] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654334, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.929795] env[62974]: DEBUG nova.network.neutron [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Successfully created port: 70c67ce0-0054-4b7d-886e-7073fb213aa5 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.934421] env[62974]: DEBUG nova.compute.manager [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.935711] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3820e0-75eb-47ac-962a-738644f35609 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.942276] env[62974]: DEBUG oslo_vmware.api [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654335, 'name': PowerOffVM_Task, 'duration_secs': 0.337089} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.942902] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 799.943236] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 799.943343] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-172f6bde-5801-4f58-b602-94210b5b5e15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.015436] env[62974]: DEBUG nova.network.neutron [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.033223] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 800.033461] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 800.033904] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleting the datastore file [datastore2] 5d6a072e-dba7-461d-9d41-8ca003b31102 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 800.037077] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9324a4e1-3dcf-43f1-871b-56d276e3d2c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.046514] env[62974]: DEBUG oslo_vmware.api [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 800.046514] env[62974]: value = "task-2654338" [ 800.046514] env[62974]: _type = "Task" [ 800.046514] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.061578] env[62974]: DEBUG oslo_vmware.api [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.194894] env[62974]: DEBUG oslo_vmware.api [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654336, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.226619] env[62974]: DEBUG nova.network.neutron [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Updating instance_info_cache with network_info: [{"id": "9f1050c5-0ced-4039-b2a7-cea11ae0f227", "address": "fa:16:3e:6e:99:62", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1050c5-0c", "ovs_interfaceid": "9f1050c5-0ced-4039-b2a7-cea11ae0f227", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.304849] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654332, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.600416} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.305165] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b31dea29-79d6-4117-bdb5-2d38fb660a53/b31dea29-79d6-4117-bdb5-2d38fb660a53.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.305419] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.305675] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f42834c3-649b-47f0-aadd-7b9a38f310ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.314919] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 800.314919] env[62974]: value = "task-2654339" [ 800.314919] env[62974]: _type = "Task" [ 800.314919] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.327360] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654339, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.411366] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654334, 'name': CreateSnapshot_Task, 'duration_secs': 0.941337} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.414300] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 800.415658] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36d8034-5c82-4646-bf3d-91bd2e25e7b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.510030] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9486313a-d877-4737-88fb-d7cf5fa1f527 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.522032] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a1da39-c345-453b-97ec-a7e4942d13b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.566237] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359cea11-8bb5-409a-8bc4-2c320e705e86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.576049] env[62974]: DEBUG oslo_vmware.api [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164277} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.578559] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 800.578855] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 800.579095] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 800.579289] env[62974]: INFO nova.compute.manager [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 800.579549] env[62974]: DEBUG oslo.service.loopingcall [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.579814] env[62974]: DEBUG nova.compute.manager [-] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 800.579911] env[62974]: DEBUG nova.network.neutron [-] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 800.582677] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9ea4df-769a-4d8e-91ad-5632796c057c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.600420] env[62974]: DEBUG nova.compute.provider_tree [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.695075] env[62974]: DEBUG oslo_vmware.api [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654336, 'name': PowerOffVM_Task, 'duration_secs': 0.525176} completed successfully. 
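The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" record above comes from oslo.service's looping-call module wrapping the deallocation in a retry loop. A minimal sketch of that wrapper pattern, with illustrative retry parameters (the values nova actually uses are not in this log):

    from oslo_service import loopingcall


    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10, exceptions=(IOError,))
    def _deallocate_network_with_retries():
        # In nova this body calls the real network deallocation; any exception
        # listed above triggers another attempt after an increasing sleep.
        print("deallocating network for the instance")


    _deallocate_network_with_retries()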
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.695075] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 800.695191] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 800.695433] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70e66983-6133-4f5c-9313-39f081ed7aeb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.730219] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "refresh_cache-eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.730219] env[62974]: DEBUG nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Instance network_info: |[{"id": "9f1050c5-0ced-4039-b2a7-cea11ae0f227", "address": "fa:16:3e:6e:99:62", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1050c5-0c", "ovs_interfaceid": "9f1050c5-0ced-4039-b2a7-cea11ae0f227", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 800.730410] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:99:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f1050c5-0ced-4039-b2a7-cea11ae0f227', 
'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.739299] env[62974]: DEBUG oslo.service.loopingcall [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.740062] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 800.740329] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80df1c5e-ffa8-4ca6-979f-e4c325585133 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.762395] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.762395] env[62974]: value = "task-2654341" [ 800.762395] env[62974]: _type = "Task" [ 800.762395] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.776560] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654341, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.789061] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 800.789868] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 800.790302] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleting the datastore file [datastore2] 8621428e-cf42-47a4-82c8-a003c377b257 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 800.790514] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7325d43-8b66-449e-b836-2d59206556c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.794743] env[62974]: DEBUG nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 800.806890] env[62974]: DEBUG oslo_vmware.api [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 800.806890] env[62974]: value = "task-2654342" [ 800.806890] env[62974]: _type = "Task" [ 800.806890] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.820149] env[62974]: DEBUG oslo_vmware.api [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654342, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.834684] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654339, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082746} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.837321] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.837620] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.837805] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.838063] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.838490] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Image pref 0:0:0 
{{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.838693] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.839205] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.839404] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 800.839574] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.839805] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.839964] env[62974]: DEBUG nova.virt.hardware [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.840399] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 800.841225] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d0ea1a-61ed-4220-abc2-1ca560f05f80 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.846753] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94babad2-12c0-4791-930c-78719d218e31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.873030] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 
b31dea29-79d6-4117-bdb5-2d38fb660a53/b31dea29-79d6-4117-bdb5-2d38fb660a53.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.877203] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20a28f15-498c-4230-af16-0fd0eceb302a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.898077] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8323a21a-a422-44aa-be92-5ecf6d415f9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.918025] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 800.918025] env[62974]: value = "task-2654343" [ 800.918025] env[62974]: _type = "Task" [ 800.918025] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.929408] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654343, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.949222] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 800.950089] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5ee8b551-45c7-4e12-bb1e-2c89d022f28d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.961028] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 800.961028] env[62974]: value = "task-2654344" [ 800.961028] env[62974]: _type = "Task" [ 800.961028] env[62974]: } to complete. 
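"Creating linked-clone VM from snapshot" above maps to a vSphere CloneVM_Task whose relocate spec uses the createNewChildDiskBacking disk move type, so the clone's disks become delta files backed by the snapshot. A sketch of that spec, assuming an oslo.vmware session as in the earlier example; the managed-object IDs, clone name, and powerOn/template flags are placeholders, and the spec objects are built with the session's suds client factory:

    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vcenter.example.org', 'svc-nova', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')                  # placeholder
    snapshot_ref = vim_util.get_moref('snapshot-5678', 'VirtualMachineSnapshot')
    folder_ref = vim_util.get_moref('group-v3', 'Folder')

    factory = session.vim.client.factory
    relocate_spec = factory.create('ns0:VirtualMachineRelocateSpec')
    relocate_spec.diskMoveType = 'createNewChildDiskBacking'   # linked clone

    clone_spec = factory.create('ns0:VirtualMachineCloneSpec')
    clone_spec.location = relocate_spec
    clone_spec.snapshot = snapshot_ref
    clone_spec.powerOn = False
    clone_spec.template = False

    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name='snapshot-clone',
                              spec=clone_spec)
    session.wait_for_task(task)   # the "CloneVM_Task ... progress is 94%" lines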
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.964442] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.964790] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-904484e7-8694-4119-800e-75e84cba3beb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.975553] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654344, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.975553] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 800.975553] env[62974]: value = "task-2654345" [ 800.975553] env[62974]: _type = "Task" [ 800.975553] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.984839] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654345, 'name': PowerOffVM_Task} progress is 0%. 
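The "Getting desirable topologies ... Sorted desired topologies" sequence a few records above reduces to a small combinatorial step: enumerate every sockets*cores*threads factorisation of the flavor's vCPU count that stays within the maxima (65536 each here, i.e. effectively unconstrained). A self-contained sketch of that enumeration, which reproduces the single 1:1:1 topology the log reports for a 1-vCPU flavor:

    import itertools


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Return (sockets, cores, threads) triples whose product is vcpus."""
        limits = (min(vcpus, max_sockets),
                  min(vcpus, max_cores),
                  min(vcpus, max_threads))
        return [(s, c, t)
                for s, c, t in itertools.product(*(range(1, m + 1) for m in limits))
                if s * c * t == vcpus]


    print(possible_topologies(1))   # -> [(1, 1, 1)], as in the log
    print(possible_topologies(4))   # a 4-vCPU flavor yields 6 factorisations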
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.103454] env[62974]: DEBUG nova.scheduler.client.report [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 801.213237] env[62974]: DEBUG nova.compute.manager [req-c704cfaa-70fe-49c4-b957-9fd5a28dd2f3 req-26c85913-e344-410d-aeac-038de43bc09e service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Received event network-vif-deleted-2fbbc340-11dd-482a-90f2-f281ec84a833 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 801.213812] env[62974]: INFO nova.compute.manager [req-c704cfaa-70fe-49c4-b957-9fd5a28dd2f3 req-26c85913-e344-410d-aeac-038de43bc09e service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Neutron deleted interface 2fbbc340-11dd-482a-90f2-f281ec84a833; detaching it from the instance and deleting it from the info cache [ 801.213812] env[62974]: DEBUG nova.network.neutron [req-c704cfaa-70fe-49c4-b957-9fd5a28dd2f3 req-26c85913-e344-410d-aeac-038de43bc09e service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.275902] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654341, 'name': CreateVM_Task, 'duration_secs': 0.433112} completed successfully. 
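The inventory dictionary reported above is what the resource tracker hands to placement; the schedulable capacity for each resource class is (total - reserved) * allocation_ratio. Recomputing it from the logged numbers:

    # Inventory as reported for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18
    # (min_unit/max_unit/step_size omitted; they bound per-request sizes, not totals).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for resource_class, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(resource_class, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0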
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.276090] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.276810] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.276996] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.277381] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 801.277676] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86a52ecf-f548-42b5-a109-630054492231 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.284403] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 801.284403] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fed67d-1f97-eba2-235d-0a0fd8fcb049" [ 801.284403] env[62974]: _type = "Task" [ 801.284403] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.293618] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fed67d-1f97-eba2-235d-0a0fd8fcb049, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.319062] env[62974]: DEBUG oslo_vmware.api [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216982} completed successfully. 
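The Acquiring/Acquired/Releasing lock records above around the image-cache path are oslo.concurrency's lockutils at work: a named in-process lock (plus an external file-based semaphore) serialises work on one cached image. A minimal sketch of the same pattern; the lock name is copied from the log and the body of the critical section is illustrative:

    from oslo_concurrency import lockutils

    CACHE_LOCK = ('[datastore2] devstack-image-cache_base/'
                  '807f8582-499f-47ee-9d5b-755c9f39bc39')

    # In-process (greenthread) exclusion; nova additionally takes an external
    # semaphore on the same name for cross-process exclusion.
    with lockutils.lock(CACHE_LOCK):
        print('image cache entry is ours until this block exits')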
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.319394] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 801.319611] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 801.319770] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.319950] env[62974]: INFO nova.compute.manager [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Took 1.66 seconds to destroy the instance on the hypervisor. [ 801.320209] env[62974]: DEBUG oslo.service.loopingcall [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 801.320405] env[62974]: DEBUG nova.compute.manager [-] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 801.320569] env[62974]: DEBUG nova.network.neutron [-] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.341020] env[62974]: DEBUG nova.compute.manager [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Received event network-changed-9f1050c5-0ced-4039-b2a7-cea11ae0f227 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 801.341089] env[62974]: DEBUG nova.compute.manager [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Refreshing instance network info cache due to event network-changed-9f1050c5-0ced-4039-b2a7-cea11ae0f227. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 801.341274] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] Acquiring lock "refresh_cache-eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.341419] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] Acquired lock "refresh_cache-eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.341579] env[62974]: DEBUG nova.network.neutron [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Refreshing network info cache for port 9f1050c5-0ced-4039-b2a7-cea11ae0f227 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 801.396560] env[62974]: DEBUG nova.network.neutron [-] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.428166] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654343, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.478942] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654344, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.490605] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654345, 'name': PowerOffVM_Task, 'duration_secs': 0.242037} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.490972] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.491272] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 801.492088] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d09a894-f314-444c-9c94-e3e25cb56a75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.500528] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.500794] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e9763e6-8fbc-426a-a720-1b7fd9596977 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.527666] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.527897] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.574128] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.574357] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.574599] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 
tempest-ServersAdminTestJSON-699970003-project-member] Deleting the datastore file [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.574861] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-128be20a-ac6d-4e8c-ae58-379166f487bc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.586865] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 801.586865] env[62974]: value = "task-2654347" [ 801.586865] env[62974]: _type = "Task" [ 801.586865] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.596125] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654347, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.612828] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.844s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.613877] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.828s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.614078] env[62974]: DEBUG nova.objects.instance [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 801.717178] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9de619af-7bde-4014-b22c-7a017e62c2fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.728285] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e7bbef-71f5-4f7f-a589-870db89aae28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.767319] env[62974]: DEBUG nova.compute.manager [req-c704cfaa-70fe-49c4-b957-9fd5a28dd2f3 req-26c85913-e344-410d-aeac-038de43bc09e service nova] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Detach interface failed, port_id=2fbbc340-11dd-482a-90f2-f281ec84a833, reason: Instance 5d6a072e-dba7-461d-9d41-8ca003b31102 could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 801.794897] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fed67d-1f97-eba2-235d-0a0fd8fcb049, 'name': SearchDatastore_Task, 'duration_secs': 0.010844} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.795234] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.795465] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.795696] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.795944] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.796020] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.796276] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4030617a-9a88-4037-b59a-4ddc47b277e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.805724] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.805908] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.806669] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a125738a-fa98-4a88-8751-8c33a611eca4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.814562] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 801.814562] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520943b2-825b-164d-c98f-f2b8969430ed" [ 801.814562] env[62974]: _type = "Task" [ 801.814562] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.825507] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520943b2-825b-164d-c98f-f2b8969430ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009423} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.826265] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fc16bc0-8573-4ebe-8534-711fcfdf1c85 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.832015] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 801.832015] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5263587d-aa55-428f-637f-73fe5ce7a86f" [ 801.832015] env[62974]: _type = "Task" [ 801.832015] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.840886] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5263587d-aa55-428f-637f-73fe5ce7a86f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.899389] env[62974]: INFO nova.compute.manager [-] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Took 1.32 seconds to deallocate network for instance. [ 801.927914] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654343, 'name': ReconfigVM_Task, 'duration_secs': 0.586842} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.932180] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Reconfigured VM instance instance-0000003e to attach disk [datastore1] b31dea29-79d6-4117-bdb5-2d38fb660a53/b31dea29-79d6-4117-bdb5-2d38fb660a53.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.932180] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d69fab6-6dfe-4b81-9550-65c7986c7fe6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.940285] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 801.940285] env[62974]: value = "task-2654348" [ 801.940285] env[62974]: _type = "Task" [ 801.940285] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.951285] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654348, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.973783] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654344, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.048702] env[62974]: DEBUG nova.network.neutron [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Successfully updated port: 70c67ce0-0054-4b7d-886e-7073fb213aa5 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 802.075482] env[62974]: DEBUG nova.network.neutron [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Updated VIF entry in instance network info cache for port 9f1050c5-0ced-4039-b2a7-cea11ae0f227. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 802.075831] env[62974]: DEBUG nova.network.neutron [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Updating instance_info_cache with network_info: [{"id": "9f1050c5-0ced-4039-b2a7-cea11ae0f227", "address": "fa:16:3e:6e:99:62", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1050c5-0c", "ovs_interfaceid": "9f1050c5-0ced-4039-b2a7-cea11ae0f227", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.100551] env[62974]: DEBUG nova.network.neutron [-] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.101757] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13446} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.102365] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 802.102545] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 802.102720] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 802.117152] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "4adbc990-78cf-482d-bde4-07dfa65cdc9a" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.117380] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "4adbc990-78cf-482d-bde4-07dfa65cdc9a" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.126085] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "4adbc990-78cf-482d-bde4-07dfa65cdc9a" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.009s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.126514] env[62974]: DEBUG nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 802.344770] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5263587d-aa55-428f-637f-73fe5ce7a86f, 'name': SearchDatastore_Task, 'duration_secs': 0.00922} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.344770] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.344916] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26/eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 802.345842] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b7b4d0c-beb8-4a05-b175-db5dcd047bfc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.352744] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 802.352744] env[62974]: value = "task-2654349" [ 802.352744] env[62974]: _type = "Task" [ 802.352744] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.361988] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654349, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.405649] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.452371] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654348, 'name': Rename_Task, 'duration_secs': 0.261825} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.452686] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 802.453077] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fcfd585-41c6-4380-b26d-bca70b14e7ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.462689] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 802.462689] env[62974]: value = "task-2654350" [ 802.462689] env[62974]: _type = "Task" [ 802.462689] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.475668] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654344, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.479178] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654350, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.551296] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "refresh_cache-0bc05477-1802-4f8b-8d23-2742f9baf603" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.551615] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "refresh_cache-0bc05477-1802-4f8b-8d23-2742f9baf603" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.551763] env[62974]: DEBUG nova.network.neutron [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.578621] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1515643-08e1-431b-9c99-95a116b766e1 req-e642f5d0-202f-46ed-9a7b-5f7d96731109 service nova] Releasing lock "refresh_cache-eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.603225] env[62974]: INFO nova.compute.manager [-] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Took 1.28 seconds to deallocate network for instance. 
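The oslo_concurrency.lockutils entries in this trace ("Acquiring lock ... by ...", "... acquired ... :: waited", "... released ... :: held") and the oslo_vmware.api entries ("Waiting for the task ... to complete", "progress is N%", "completed successfully") are emitted by two library helpers that Nova calls throughout. Below is a minimal, hypothetical sketch of both patterns, not Nova source; the names update_usage, power_on_and_wait and the vm_ref argument are placeholders chosen to mirror the log, and the vCenter session is assumed to already exist.

    # Sketch of the lockutils pattern behind the "Acquiring lock" /
    # "acquired :: waited" / "released :: held" DEBUG lines
    # (logged by the inner wrapper at lockutils.py:402/407/421).
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # While this body runs, lockutils records how long the caller
        # waited for the lock and, on exit, how long it was held.
        pass

    # Sketch of the task polling behind "Waiting for the task ... to
    # complete" and the "progress is N%" poll lines; vm_ref is a
    # placeholder managed-object reference for the target VM.
    def power_on_and_wait(session, vm_ref):
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)  # polls until the task reports success

Both helpers are the source of the timing figures (waited/held seconds and task duration_secs) recorded in the surrounding entries.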
[ 802.626250] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee252234-a356-4c0a-a8f8-8d1e4ea5d5b3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.627682] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.676s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.629683] env[62974]: INFO nova.compute.claims [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 802.634537] env[62974]: DEBUG nova.compute.utils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.638752] env[62974]: DEBUG nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 802.638752] env[62974]: DEBUG nova.network.neutron [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 802.690464] env[62974]: DEBUG nova.policy [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf156218a93b4610b408889b02fa4ae1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08e83dd99878401ba921033cbd720343', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 802.864539] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654349, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.977294] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654344, 'name': CloneVM_Task, 'duration_secs': 1.76835} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.981313] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Created linked-clone VM from snapshot [ 802.981617] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654350, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.982822] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fa2417-1168-4644-94d7-4150ba8d6511 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.986489] env[62974]: DEBUG nova.network.neutron [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Successfully created port: ab4cf036-7af9-44a4-aef7-4da58ac03efa {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.996543] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Uploading image a51f6776-a571-4d03-938a-5a97a88c6d55 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 803.023699] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 803.023699] env[62974]: value = "vm-535374" [ 803.023699] env[62974]: _type = "VirtualMachine" [ 803.023699] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 803.023988] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-568fc90b-f482-435b-bc67-472effe66223 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.032444] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lease: (returnval){ [ 803.032444] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521773cc-9310-a2b0-797e-d13a42d7a602" [ 803.032444] env[62974]: _type = "HttpNfcLease" [ 803.032444] env[62974]: } obtained for exporting VM: (result){ [ 803.032444] env[62974]: value = "vm-535374" [ 803.032444] env[62974]: _type = "VirtualMachine" [ 803.032444] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 803.032739] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the lease: (returnval){ [ 803.032739] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521773cc-9310-a2b0-797e-d13a42d7a602" [ 803.032739] env[62974]: _type = "HttpNfcLease" [ 803.032739] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 803.039616] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 803.039616] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521773cc-9310-a2b0-797e-d13a42d7a602" [ 803.039616] env[62974]: _type = "HttpNfcLease" [ 803.039616] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 803.082050] env[62974]: DEBUG nova.network.neutron [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.110619] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.145055] env[62974]: DEBUG nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 803.153681] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 803.153681] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.153681] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 803.153681] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.154518] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 803.154518] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 803.154518] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 803.154518] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 803.154518] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 
tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 803.154731] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 803.154731] env[62974]: DEBUG nova.virt.hardware [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 803.155032] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f12d70-a164-43f6-aab6-a59be516007a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.166761] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbe7fe0-a30d-4ef9-aa89-7d60e984a304 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.181473] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:2d:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.190096] env[62974]: DEBUG oslo.service.loopingcall [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.190921] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.191167] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f0d967d-e140-4e12-8dbf-16f23e1479b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.226458] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.226458] env[62974]: value = "task-2654352" [ 803.226458] env[62974]: _type = "Task" [ 803.226458] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.238702] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654352, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.281851] env[62974]: DEBUG nova.network.neutron [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Updating instance_info_cache with network_info: [{"id": "70c67ce0-0054-4b7d-886e-7073fb213aa5", "address": "fa:16:3e:57:39:04", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70c67ce0-00", "ovs_interfaceid": "70c67ce0-0054-4b7d-886e-7073fb213aa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.354168] env[62974]: DEBUG nova.compute.manager [req-7d482db3-cfdf-4ec8-878c-4ba9c5004fb2 req-fa60e3de-6775-4272-9b99-fcd292931e42 service nova] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Received event network-vif-deleted-3b60d221-2cab-4e30-8892-d139b511ccc1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 803.365140] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654349, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527505} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.365416] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26/eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 803.365628] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.366117] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cc33461-6762-48c2-9db9-32607d432cc5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.374648] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 803.374648] env[62974]: value = "task-2654353" [ 803.374648] env[62974]: _type = "Task" [ 803.374648] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.382881] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654353, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.464046] env[62974]: DEBUG nova.compute.manager [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Received event network-vif-plugged-70c67ce0-0054-4b7d-886e-7073fb213aa5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 803.464046] env[62974]: DEBUG oslo_concurrency.lockutils [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] Acquiring lock "0bc05477-1802-4f8b-8d23-2742f9baf603-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.464046] env[62974]: DEBUG oslo_concurrency.lockutils [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.464145] env[62974]: DEBUG oslo_concurrency.lockutils [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.464293] env[62974]: DEBUG nova.compute.manager [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] No waiting events found dispatching network-vif-plugged-70c67ce0-0054-4b7d-886e-7073fb213aa5 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 803.464454] env[62974]: WARNING nova.compute.manager [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Received unexpected event network-vif-plugged-70c67ce0-0054-4b7d-886e-7073fb213aa5 for instance with vm_state building and task_state spawning. [ 803.464605] env[62974]: DEBUG nova.compute.manager [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Received event network-changed-70c67ce0-0054-4b7d-886e-7073fb213aa5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 803.464751] env[62974]: DEBUG nova.compute.manager [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Refreshing instance network info cache due to event network-changed-70c67ce0-0054-4b7d-886e-7073fb213aa5. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 803.464907] env[62974]: DEBUG oslo_concurrency.lockutils [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] Acquiring lock "refresh_cache-0bc05477-1802-4f8b-8d23-2742f9baf603" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.476770] env[62974]: DEBUG oslo_vmware.api [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654350, 'name': PowerOnVM_Task, 'duration_secs': 0.682988} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.477068] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.477276] env[62974]: INFO nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Took 10.31 seconds to spawn the instance on the hypervisor. [ 803.477455] env[62974]: DEBUG nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.478228] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a425a934-2cb2-436c-8c7e-2a66ed9d6f49 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.542056] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 803.542056] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521773cc-9310-a2b0-797e-d13a42d7a602" [ 803.542056] env[62974]: _type = "HttpNfcLease" [ 803.542056] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 803.542394] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 803.542394] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521773cc-9310-a2b0-797e-d13a42d7a602" [ 803.542394] env[62974]: _type = "HttpNfcLease" [ 803.542394] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 803.543149] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ace017-dccf-4e3f-9dfe-50db71e1b956 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.551368] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d33f35-f5e6-7e8d-3563-614fc3c3c994/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 803.551546] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d33f35-f5e6-7e8d-3563-614fc3c3c994/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 803.739483] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654352, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.754342] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1be61651-94cc-44e1-95c6-755ecdc45cd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.784842] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "refresh_cache-0bc05477-1802-4f8b-8d23-2742f9baf603" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.785150] env[62974]: DEBUG nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Instance network_info: |[{"id": "70c67ce0-0054-4b7d-886e-7073fb213aa5", "address": "fa:16:3e:57:39:04", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70c67ce0-00", "ovs_interfaceid": "70c67ce0-0054-4b7d-886e-7073fb213aa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 803.785617] env[62974]: DEBUG oslo_concurrency.lockutils [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] Acquired lock "refresh_cache-0bc05477-1802-4f8b-8d23-2742f9baf603" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.785806] env[62974]: DEBUG nova.network.neutron [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Refreshing network info cache for port 70c67ce0-0054-4b7d-886e-7073fb213aa5 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.787025] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:39:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70c67ce0-0054-4b7d-886e-7073fb213aa5', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.796050] env[62974]: DEBUG oslo.service.loopingcall [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.798915] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.801679] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6c3d74a-92fb-414c-a65c-b59992efaeda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.825042] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.825042] env[62974]: value = "task-2654354" [ 803.825042] env[62974]: _type = "Task" [ 803.825042] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.835809] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654354, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.890246] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071214} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.890630] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.891536] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a36d4b5-7d34-44df-9c95-a0c872e1e376 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.918162] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26/eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.923633] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80baaf09-1df0-4830-851e-a7f6ecb3bd85 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.948020] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 803.948020] env[62974]: value = "task-2654355" [ 803.948020] env[62974]: _type = "Task" [ 803.948020] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.961691] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654355, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.996404] env[62974]: INFO nova.compute.manager [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Took 38.86 seconds to build instance. [ 804.156255] env[62974]: DEBUG nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 804.176651] env[62974]: DEBUG nova.network.neutron [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Updated VIF entry in instance network info cache for port 70c67ce0-0054-4b7d-886e-7073fb213aa5. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.177212] env[62974]: DEBUG nova.network.neutron [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Updating instance_info_cache with network_info: [{"id": "70c67ce0-0054-4b7d-886e-7073fb213aa5", "address": "fa:16:3e:57:39:04", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70c67ce0-00", "ovs_interfaceid": "70c67ce0-0054-4b7d-886e-7073fb213aa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.187805] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 804.188144] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.188316] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.188668] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 804.188999] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 804.189234] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 804.189476] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 804.189712] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 804.189970] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 804.190162] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 804.190339] env[62974]: DEBUG nova.virt.hardware [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 804.191614] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbada153-ec1d-49da-b5f3-3f1bf88df57d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.207168] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecbac48-add2-491e-93f9-1dafa144d699 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.241367] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654352, 'name': CreateVM_Task, 'duration_secs': 0.792473} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.244565] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.246074] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.246321] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.246644] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 804.246937] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae9c5c1-c9bd-4bf7-935b-f00d78293be4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.253470] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 804.253470] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527e76f0-aaa5-675b-38d2-a7125d9daa6f" [ 804.253470] env[62974]: _type = "Task" [ 804.253470] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.267645] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527e76f0-aaa5-675b-38d2-a7125d9daa6f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.300844] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0220d6b4-480e-4219-b2ae-a431550fd4ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.309058] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1547e9-7abb-4920-9dc8-45b5cb7868ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.345468] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bde175d-377d-4c13-93c5-82794eb2ad94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.022020] env[62974]: DEBUG nova.network.neutron [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Successfully updated port: ab4cf036-7af9-44a4-aef7-4da58ac03efa {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.023494] env[62974]: DEBUG oslo_concurrency.lockutils [None req-60286a05-3ef1-4c97-81de-f8064a760f1b tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.279s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.024255] env[62974]: DEBUG oslo_concurrency.lockutils [req-d29af984-6a73-46cf-94a8-cf48efb09ffe req-fbcd930a-08f0-429c-912b-0619a65f4603 service nova] Releasing lock "refresh_cache-0bc05477-1802-4f8b-8d23-2742f9baf603" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.039588] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654354, 'name': CreateVM_Task, 'duration_secs': 0.359965} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.041368] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3fafd30-3418-4fee-a867-776c7a334d7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.045154] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 805.052427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.053019] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527e76f0-aaa5-675b-38d2-a7125d9daa6f, 'name': SearchDatastore_Task, 'duration_secs': 0.012577} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.053275] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654355, 'name': ReconfigVM_Task, 'duration_secs': 0.351335} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.054995] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.055302] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.055590] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.055700] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.055925] 
env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.056179] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Reconfigured VM instance instance-0000003f to attach disk [datastore2] eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26/eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.064637] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.065095] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 805.065372] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7a33a0d-3fd2-40d5-a465-67ffb5f5aeeb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.067274] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c630b9e-5355-40dc-a03c-b37f1d845a4c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.069203] env[62974]: DEBUG nova.compute.provider_tree [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.070516] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0042e91d-8982-462b-8ff2-629458ff0449 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.076576] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 805.076576] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52448066-0737-267d-856b-b66eb68c91cd" [ 805.076576] env[62974]: _type = "Task" [ 805.076576] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.082149] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 805.082149] env[62974]: value = "task-2654356" [ 805.082149] env[62974]: _type = "Task" [ 805.082149] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.084162] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.084361] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.088397] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19a1796b-709c-41b7-9329-b968cad678a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.093929] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52448066-0737-267d-856b-b66eb68c91cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.097808] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 805.097808] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ca340c-c8d0-83c4-c390-56f3b0e72bb1" [ 805.097808] env[62974]: _type = "Task" [ 805.097808] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.101417] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654356, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.111719] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ca340c-c8d0-83c4-c390-56f3b0e72bb1, 'name': SearchDatastore_Task, 'duration_secs': 0.010294} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.112822] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54ea3b9c-a048-4211-a951-9758df5183ea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.119534] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 805.119534] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f11425-067f-6d6e-7539-482c06881fa6" [ 805.119534] env[62974]: _type = "Task" [ 805.119534] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.128902] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f11425-067f-6d6e-7539-482c06881fa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.380710] env[62974]: DEBUG nova.compute.manager [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Received event network-vif-plugged-ab4cf036-7af9-44a4-aef7-4da58ac03efa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 805.381130] env[62974]: DEBUG oslo_concurrency.lockutils [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] Acquiring lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.381358] env[62974]: DEBUG oslo_concurrency.lockutils [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.381463] env[62974]: DEBUG oslo_concurrency.lockutils [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.381755] env[62974]: DEBUG nova.compute.manager [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] No waiting events found dispatching network-vif-plugged-ab4cf036-7af9-44a4-aef7-4da58ac03efa {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 805.381977] env[62974]: WARNING nova.compute.manager [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Received unexpected event 
network-vif-plugged-ab4cf036-7af9-44a4-aef7-4da58ac03efa for instance with vm_state building and task_state spawning. [ 805.382200] env[62974]: DEBUG nova.compute.manager [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Received event network-changed-ab4cf036-7af9-44a4-aef7-4da58ac03efa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 805.382456] env[62974]: DEBUG nova.compute.manager [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Refreshing instance network info cache due to event network-changed-ab4cf036-7af9-44a4-aef7-4da58ac03efa. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 805.382712] env[62974]: DEBUG oslo_concurrency.lockutils [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] Acquiring lock "refresh_cache-6c7401b6-a69f-4de3-aeb9-26c727d57b76" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.382917] env[62974]: DEBUG oslo_concurrency.lockutils [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] Acquired lock "refresh_cache-6c7401b6-a69f-4de3-aeb9-26c727d57b76" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.383146] env[62974]: DEBUG nova.network.neutron [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Refreshing network info cache for port ab4cf036-7af9-44a4-aef7-4da58ac03efa {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 805.527077] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "refresh_cache-6c7401b6-a69f-4de3-aeb9-26c727d57b76" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.535712] env[62974]: DEBUG nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 805.574938] env[62974]: DEBUG nova.scheduler.client.report [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.596041] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654356, 'name': Rename_Task, 'duration_secs': 0.167829} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.599647] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 805.600232] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52448066-0737-267d-856b-b66eb68c91cd, 'name': SearchDatastore_Task, 'duration_secs': 0.014149} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.600598] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4baa5ad5-eebe-496d-b40c-16983f56e49e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.602423] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.602889] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.603325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.610990] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 805.610990] env[62974]: value = "task-2654357" [ 805.610990] env[62974]: _type = "Task" [ 805.610990] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.622941] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.632744] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f11425-067f-6d6e-7539-482c06881fa6, 'name': SearchDatastore_Task, 'duration_secs': 0.011393} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.633256] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.633632] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 805.634198] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.634589] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.634933] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e3e23eb-82f0-4478-bbf8-a12845b003cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.637333] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59299b07-d339-4f2d-bf41-f3ddf49be25e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.646550] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 805.646550] env[62974]: value = "task-2654358" [ 805.646550] env[62974]: _type = "Task" [ 805.646550] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.654487] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.654681] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.656035] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc992549-0050-4c65-881f-b8f26c618703 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.665820] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.666129] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 805.666129] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52915be8-347d-8e7e-f003-adb154b7f5fc" [ 805.666129] env[62974]: _type = "Task" [ 805.666129] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.675140] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52915be8-347d-8e7e-f003-adb154b7f5fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.930589] env[62974]: DEBUG nova.network.neutron [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.027288] env[62974]: DEBUG nova.network.neutron [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.067500] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.081031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.453s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.081582] env[62974]: DEBUG nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 806.084403] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.867s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.084653] env[62974]: DEBUG nova.objects.instance [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'resources' on Instance uuid 65615fd7-c219-4c19-8ecf-11336b616ead {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.128662] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654357, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.165481] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654358, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.178850] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52915be8-347d-8e7e-f003-adb154b7f5fc, 'name': SearchDatastore_Task, 'duration_secs': 0.030026} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.179945] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae076366-032c-4079-b2e0-a2063910a818 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.188631] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 806.188631] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528fc582-81c2-2701-f79d-911664e2a3b5" [ 806.188631] env[62974]: _type = "Task" [ 806.188631] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.200691] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528fc582-81c2-2701-f79d-911664e2a3b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.533075] env[62974]: DEBUG oslo_concurrency.lockutils [req-22eca2ae-b810-4e24-963d-0fd9d7cf7c7b req-1bfcb228-9604-43c7-88b8-dfaaa7b5ed76 service nova] Releasing lock "refresh_cache-6c7401b6-a69f-4de3-aeb9-26c727d57b76" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.533075] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquired lock "refresh_cache-6c7401b6-a69f-4de3-aeb9-26c727d57b76" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.533075] env[62974]: DEBUG nova.network.neutron [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.593641] env[62974]: DEBUG nova.compute.utils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 806.596385] env[62974]: DEBUG nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Not allocating networking since 'none' was specified. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 806.633854] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654357, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.663381] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654358, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662581} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.666296] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 806.666604] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.667072] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb21e8e3-4efe-4831-904b-dae24edd7974 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.676461] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 806.676461] env[62974]: value = "task-2654359" [ 806.676461] env[62974]: _type = "Task" [ 806.676461] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.690429] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654359, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.705843] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528fc582-81c2-2701-f79d-911664e2a3b5, 'name': SearchDatastore_Task, 'duration_secs': 0.065817} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.706377] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.706730] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 0bc05477-1802-4f8b-8d23-2742f9baf603/0bc05477-1802-4f8b-8d23-2742f9baf603.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.707175] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90adcc7d-f144-48b6-8667-2bd0ae3c414a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.719611] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 806.719611] env[62974]: value = "task-2654360" [ 806.719611] env[62974]: _type = "Task" [ 806.719611] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.731436] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.069993] env[62974]: DEBUG nova.network.neutron [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.100368] env[62974]: DEBUG nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 807.129205] env[62974]: DEBUG oslo_vmware.api [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654357, 'name': PowerOnVM_Task, 'duration_secs': 1.050138} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.129205] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.129205] env[62974]: INFO nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Took 9.09 seconds to spawn the instance on the hypervisor. [ 807.129205] env[62974]: DEBUG nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.131886] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f18883-a33d-49a8-a393-0eee5e22f110 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.192315] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654359, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.217311} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.195866] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 807.198025] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1a587e-33cc-4a97-9393-6bef628608d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.227630] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.230064] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58dd1964-66b7-4249-8f07-5a5da1449c34 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.245780] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa2e21e-5125-48c6-b0d0-0c97042632d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.265389] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50ba4e3-d77e-4690-a4a2-edaf07b37619 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.268650] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654360, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.270244] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 807.270244] env[62974]: value = "task-2654361" [ 807.270244] env[62974]: _type = "Task" [ 807.270244] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.306757] env[62974]: DEBUG nova.network.neutron [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Updating instance_info_cache with network_info: [{"id": "ab4cf036-7af9-44a4-aef7-4da58ac03efa", "address": "fa:16:3e:f8:fd:e0", "network": {"id": "5139ab25-1218-4728-a4fb-6fd38fe8bec4", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-653480784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e83dd99878401ba921033cbd720343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab4cf036-7a", "ovs_interfaceid": "ab4cf036-7af9-44a4-aef7-4da58ac03efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.308824] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73d7a52-e393-4e7c-aa60-ae2317e2aa72 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.311721] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654361, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.319988] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec72557-a1ee-4424-823c-bd913ad07396 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.343038] env[62974]: DEBUG nova.compute.provider_tree [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.659409] env[62974]: INFO nova.compute.manager [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Took 37.15 seconds to build instance. [ 807.742176] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654360, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.782406] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.813757] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Releasing lock "refresh_cache-6c7401b6-a69f-4de3-aeb9-26c727d57b76" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.814663] env[62974]: DEBUG nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Instance network_info: |[{"id": "ab4cf036-7af9-44a4-aef7-4da58ac03efa", "address": "fa:16:3e:f8:fd:e0", "network": {"id": "5139ab25-1218-4728-a4fb-6fd38fe8bec4", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-653480784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e83dd99878401ba921033cbd720343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab4cf036-7a", "ovs_interfaceid": "ab4cf036-7af9-44a4-aef7-4da58ac03efa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 807.814861] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:fd:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab4cf036-7af9-44a4-aef7-4da58ac03efa', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.823121] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Creating folder: Project (08e83dd99878401ba921033cbd720343). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.823456] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bcc7a82-ecae-475f-8314-1bd0a6a5412a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.836983] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Created folder: Project (08e83dd99878401ba921033cbd720343) in parent group-v535199. [ 807.837235] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Creating folder: Instances. Parent ref: group-v535377. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.837505] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41083f55-1a31-4987-ba78-18af3b2b304a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.846163] env[62974]: DEBUG nova.scheduler.client.report [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.851511] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Created folder: Instances in parent group-v535377. 
[ 807.851792] env[62974]: DEBUG oslo.service.loopingcall [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.852336] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.852575] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2aa614cd-8b8f-4db1-8d01-76cbebbc5d30 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.876034] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.876034] env[62974]: value = "task-2654364" [ 807.876034] env[62974]: _type = "Task" [ 807.876034] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.885337] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654364, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.117330] env[62974]: DEBUG nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 808.144510] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.144790] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.144953] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.145149] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 
tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.145296] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.145480] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.145765] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.145992] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.146261] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.146498] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.146743] env[62974]: DEBUG nova.virt.hardware [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.147709] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebc8bc1-9b08-473a-8735-d2c6858afbdf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.156746] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a485b95-1cd6-4eee-9f5c-77cca464c845 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.161165] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e5278ec-5576-4407-8e80-072a41e98022 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.585s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.176288] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.182768] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Creating folder: Project (5f18a45f467641ceacc873c64534a881). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.183824] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05d8defb-ac22-4e60-99bc-bcdd27a98c93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.197037] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Created folder: Project (5f18a45f467641ceacc873c64534a881) in parent group-v535199. [ 808.197248] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Creating folder: Instances. Parent ref: group-v535380. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.197482] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a3ef37b-cdd7-41b0-b193-a25e293c4c15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.209358] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Created folder: Instances in parent group-v535380. [ 808.209618] env[62974]: DEBUG oslo.service.loopingcall [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.209822] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.210053] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20aac4f0-d892-4c22-8048-b3e8dc245bf8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.231223] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.231223] env[62974]: value = "task-2654367" [ 808.231223] env[62974]: _type = "Task" [ 808.231223] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.243513] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654367, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.246223] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654360, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.282919] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654361, 'name': ReconfigVM_Task, 'duration_secs': 0.73849} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.283241] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Reconfigured VM instance instance-00000013 to attach disk [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211/b3827c67-9075-4a53-9f9e-8651e3f4b211.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 808.283865] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd15b641-8696-4f89-80c3-7ec7f712b1bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.292055] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 808.292055] env[62974]: value = "task-2654368" [ 808.292055] env[62974]: _type = "Task" [ 808.292055] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.301464] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654368, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.353487] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.269s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.356411] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.051s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.383083] env[62974]: INFO nova.scheduler.client.report [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted allocations for instance 65615fd7-c219-4c19-8ecf-11336b616ead [ 808.388193] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654364, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.664412] env[62974]: DEBUG nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 808.741710] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654360, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.738091} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.742346] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 0bc05477-1802-4f8b-8d23-2742f9baf603/0bc05477-1802-4f8b-8d23-2742f9baf603.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 808.742564] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 808.742808] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cabac68a-ef0b-4235-a67e-173da1e88c2a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.747528] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654367, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.753385] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 808.753385] env[62974]: value = "task-2654369" [ 808.753385] env[62974]: _type = "Task" [ 808.753385] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.763167] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.803086] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654368, 'name': Rename_Task, 'duration_secs': 0.277299} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.803390] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 808.803682] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cc85645-0093-44e5-af89-0d0482fc6d1d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.813309] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 808.813309] env[62974]: value = "task-2654370" [ 808.813309] env[62974]: _type = "Task" [ 808.813309] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.823885] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654370, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.891025] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654364, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.895108] env[62974]: DEBUG oslo_concurrency.lockutils [None req-079525dc-00d7-435a-8f63-6c23fbe1027f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "65615fd7-c219-4c19-8ecf-11336b616ead" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.818s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.187516] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.247176] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654367, 'name': CreateVM_Task, 'duration_secs': 0.527973} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.247362] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.247906] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.248160] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.248579] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 809.248894] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0499ea69-75d3-40ad-a427-d75f53dc38ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.254828] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 809.254828] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527dba27-a490-d1a4-36fe-5f72d32fa0cb" [ 809.254828] env[62974]: _type = "Task" [ 809.254828] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.266683] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527dba27-a490-d1a4-36fe-5f72d32fa0cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.269844] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15216} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.270113] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 809.270886] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6a0dc7-518e-4710-8fae-0ae8effa0ca5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.295370] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 0bc05477-1802-4f8b-8d23-2742f9baf603/0bc05477-1802-4f8b-8d23-2742f9baf603.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 809.295753] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55cafb53-8ee2-4fe1-b0fc-b01ff75d7fbe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.319287] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 809.319287] env[62974]: value = "task-2654371" [ 809.319287] env[62974]: _type = "Task" [ 809.319287] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.326587] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654370, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.332797] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654371, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.377606] env[62974]: INFO nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating resource usage from migration 53944e14-f97c-4750-952b-d31a40fddfbe [ 809.390673] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654364, 'name': CreateVM_Task, 'duration_secs': 1.094618} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.391187] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.394015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.394015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.394015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 809.394015] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27b68b21-a7a6-4f92-8fc4-36f08d39d8cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.400147] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 809.400147] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525c6044-a6d4-bc02-6d1e-2e20b0339d56" [ 809.400147] env[62974]: _type = "Task" [ 809.400147] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.408017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cf73422d-7f4b-4bae-9d69-de74d7211243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance d8b7a39f-ec73-4a87-9b1e-9428ca72f895 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance b3827c67-9075-4a53-9f9e-8651e3f4b211 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 85f8f79d-330a-49cd-b1ae-8de20c70fcab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408219] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 366b5816-a847-48d1-ad03-5758e473a9d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408219] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 59ece0e8-85c2-499d-aba2-fd45fc116013 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408219] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c763d45b-44f0-4557-a726-7aad2bc58ba8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408219] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance da43a464-ebae-4038-9f7b-330df22d8d7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408333] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a14e7e40-afef-4607-8fa9-935a92ea49dc actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408333] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408333] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 0c2642d5-85fe-4db5-9891-025c88ca8c7c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 809.408333] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 3426d512-d54e-4852-8eca-8ba9f5fef418 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408473] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c002aec9-4fdf-45c9-9ef6-d196c4891e19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408473] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 69fb00b3-6a41-4ef5-8876-6548cae31c07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408473] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c38cddae-95b3-4f4a-bf3a-5f0bdde548a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408473] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 3bcbcf35-294e-4d58-b002-cb84db4316d5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 809.408604] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 6928b412-e8cb-42fb-bc47-dc8498f12ad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408604] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 12c769fb-8c9e-4089-9563-232cfad89b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408604] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 8621428e-cf42-47a4-82c8-a003c377b257 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 809.408604] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.408981] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 5d6a072e-dba7-461d-9d41-8ca003b31102 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 809.409290] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance d6ce3f68-a757-48bc-abeb-49c3aacdf465 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.413017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 1c7fabf7-ba82-4628-9016-b0f198add99a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.413017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance b31dea29-79d6-4117-bdb5-2d38fb660a53 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.413017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.413017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 0bc05477-1802-4f8b-8d23-2742f9baf603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.413327] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 6c7401b6-a69f-4de3-aeb9-26c727d57b76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.413327] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 6e8f07c2-60da-4bad-a7af-8c83294e232f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 809.415958] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525c6044-a6d4-bc02-6d1e-2e20b0339d56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.772070] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527dba27-a490-d1a4-36fe-5f72d32fa0cb, 'name': SearchDatastore_Task, 'duration_secs': 0.016741} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.772543] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.772958] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.773392] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.773676] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.775101] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.775101] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84d42004-aa0d-49ee-a54a-98b39f16a515 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.784964] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.785180] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.786631] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269db0b8-121c-4ef9-abf0-3f0c9008516e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.793296] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 809.793296] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea8f79-481e-ac0d-6eb5-9108ed68a658" [ 809.793296] env[62974]: _type = "Task" [ 809.793296] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.802421] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea8f79-481e-ac0d-6eb5-9108ed68a658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.829594] env[62974]: DEBUG oslo_vmware.api [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654370, 'name': PowerOnVM_Task, 'duration_secs': 0.68933} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.830405] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 809.830677] env[62974]: DEBUG nova.compute.manager [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.831566] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3c9274-9ce9-49a9-a4ac-ebfc0c5e67ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.837990] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654371, 'name': ReconfigVM_Task, 'duration_secs': 0.463882} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.838632] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 0bc05477-1802-4f8b-8d23-2742f9baf603/0bc05477-1802-4f8b-8d23-2742f9baf603.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 809.839714] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f83b9bb5-b36c-4f77-9725-8eb1072adad0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.851121] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 809.851121] env[62974]: value = "task-2654372" [ 809.851121] env[62974]: _type = "Task" [ 809.851121] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.859976] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654372, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.911556] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525c6044-a6d4-bc02-6d1e-2e20b0339d56, 'name': SearchDatastore_Task, 'duration_secs': 0.017909} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.911901] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.912153] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.912382] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.912525] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.912829] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.912942] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2d3547c-85f5-46ba-abeb-c6b0fc7e77a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.917993] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 14523914-68ab-4d39-8eb8-6a786ddcb4dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.925618] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.925915] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.927043] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb0d9839-5d2f-46d1-bc00-668b112ff669 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.935938] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 809.935938] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5248b02e-8e78-1ff8-7ca1-6c4cdc61daa4" [ 809.935938] env[62974]: _type = "Task" [ 809.935938] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.945412] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5248b02e-8e78-1ff8-7ca1-6c4cdc61daa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.308714] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea8f79-481e-ac0d-6eb5-9108ed68a658, 'name': SearchDatastore_Task, 'duration_secs': 0.013155} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.309752] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46077669-eaa5-4b51-9b7b-5951cd04aa1c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.316049] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 810.316049] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522eed5e-c7fc-57d8-49af-f73ebdefe502" [ 810.316049] env[62974]: _type = "Task" [ 810.316049] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.326787] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522eed5e-c7fc-57d8-49af-f73ebdefe502, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.359412] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.364917] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654372, 'name': Rename_Task, 'duration_secs': 0.228454} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.365217] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 810.365904] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b222839-fb67-48d8-94fd-4792ae5a077d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.374212] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 810.374212] env[62974]: value = "task-2654373" [ 810.374212] env[62974]: _type = "Task" [ 810.374212] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.383833] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.421017] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 55229db9-9442-4973-a1f2-7762227167a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.448405] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5248b02e-8e78-1ff8-7ca1-6c4cdc61daa4, 'name': SearchDatastore_Task, 'duration_secs': 0.013636} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.449612] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4b5fcb3-2f7b-4cd9-b229-faa5947dc297 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.455874] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 810.455874] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5208e7a7-2121-5868-acd2-168e55ce3893" [ 810.455874] env[62974]: _type = "Task" [ 810.455874] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.464887] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5208e7a7-2121-5868-acd2-168e55ce3893, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.827651] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522eed5e-c7fc-57d8-49af-f73ebdefe502, 'name': SearchDatastore_Task, 'duration_secs': 0.019304} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.827978] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.828277] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.828535] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-536e96a7-be5a-454a-8719-a6dcd69dcf03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.836823] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 810.836823] env[62974]: value = "task-2654374" [ 810.836823] env[62974]: _type = "Task" [ 810.836823] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.848523] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654374, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.884816] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654373, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.926249] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance e42547b0-25b7-4a34-b832-b93103065928 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.971989] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5208e7a7-2121-5868-acd2-168e55ce3893, 'name': SearchDatastore_Task, 'duration_secs': 0.015157} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.972501] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.972815] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6c7401b6-a69f-4de3-aeb9-26c727d57b76/6c7401b6-a69f-4de3-aeb9-26c727d57b76.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.973113] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e86cf9b0-22e0-4c07-ab76-799753d591ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.981465] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 810.981465] env[62974]: value = "task-2654375" [ 810.981465] env[62974]: _type = "Task" [ 810.981465] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.990525] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654375, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.349625] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654374, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.386960] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654373, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.430287] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance e23dbff7-d23e-4909-9b33-67ed15c325e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.493326] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654375, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.848433] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654374, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.755414} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.848433] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.848698] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.849086] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e3cecdb-6c0b-48d8-b85f-2a7bf5efe3e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.857448] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 811.857448] env[62974]: value = "task-2654376" [ 811.857448] env[62974]: _type = "Task" [ 811.857448] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.866467] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654376, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.873982] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.873982] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.890586] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654373, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.934463] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 6243cce3-8611-46fa-8379-e2f3c825c4dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.934613] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Migration 53944e14-f97c-4750-952b-d31a40fddfbe is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 811.934844] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance af370de1-e4d7-4312-bc72-c6398eeaf2ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 811.996585] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654375, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727117} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.996585] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6c7401b6-a69f-4de3-aeb9-26c727d57b76/6c7401b6-a69f-4de3-aeb9-26c727d57b76.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.996585] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.996585] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ba3a6d9-ee23-47bc-a4ea-9c61755fd521 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.007060] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 812.007060] env[62974]: value = "task-2654377" [ 812.007060] env[62974]: _type = "Task" [ 812.007060] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.019435] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654377, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.060781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.060781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.060781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.060781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.061120] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.064731] env[62974]: INFO nova.compute.manager [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Terminating instance [ 812.370195] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654376, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076948} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.370528] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.371581] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e928d9fc-5d18-412e-9bd4-45e657ba2b70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.399496] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.402954] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-744f9961-8faa-41ed-a113-9f9b47750085 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.423251] env[62974]: DEBUG oslo_vmware.api [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654373, 'name': PowerOnVM_Task, 'duration_secs': 1.735681} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.424494] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 812.424700] env[62974]: INFO nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Took 11.63 seconds to spawn the instance on the hypervisor. [ 812.424878] env[62974]: DEBUG nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 812.425219] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 812.425219] env[62974]: value = "task-2654378" [ 812.425219] env[62974]: _type = "Task" [ 812.425219] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.425906] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d191fa0-bf8c-4c62-bc47-af00c9a75673 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.441085] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c1d0b90c-aa1c-485d-850d-a1495feac7c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.442306] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654378, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.518570] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654377, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.215309} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.518985] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.519871] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d65f949-a1c4-4e58-8aac-9f3989b7a43c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.547367] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 6c7401b6-a69f-4de3-aeb9-26c727d57b76/6c7401b6-a69f-4de3-aeb9-26c727d57b76.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.548225] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2784cd45-7066-41be-9367-742a58b5c61a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.572134] env[62974]: DEBUG nova.compute.manager [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.572438] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.573574] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4ee1b7-1c28-4361-89b0-95b8ebe94c44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.583163] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.584417] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5a5b4a4-d5a4-400d-8963-1586e939d673 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.586011] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 812.586011] env[62974]: value = "task-2654379" [ 812.586011] env[62974]: _type = "Task" [ 812.586011] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.594029] env[62974]: DEBUG oslo_vmware.api [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 812.594029] env[62974]: value = "task-2654380" [ 812.594029] env[62974]: _type = "Task" [ 812.594029] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.596563] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654379, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.605735] env[62974]: DEBUG oslo_vmware.api [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654380, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.940320] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654378, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.950262] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 514e0f15-f27d-4fab-9107-b92884075420 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.950262] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 25 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 812.950262] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5440MB phys_disk=200GB used_disk=25GB total_vcpus=48 used_vcpus=25 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 812.955913] env[62974]: INFO nova.compute.manager [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Took 41.49 seconds to build instance. [ 813.099385] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654379, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.112824] env[62974]: DEBUG oslo_vmware.api [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654380, 'name': PowerOffVM_Task, 'duration_secs': 0.240213} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.113049] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.113263] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.113501] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a6df801-58f9-4a03-907c-0fd9e9794c09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.206161] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.206161] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.206161] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleting the datastore file [datastore2] 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.206161] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-746b7f8f-6263-4716-9e2e-7c76f5875eac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.213602] env[62974]: DEBUG oslo_vmware.api [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 813.213602] env[62974]: value = "task-2654382" [ 813.213602] env[62974]: _type = "Task" [ 813.213602] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.224521] env[62974]: DEBUG oslo_vmware.api [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654382, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.441677] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654378, 'name': ReconfigVM_Task, 'duration_secs': 0.594365} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.442009] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.442626] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b06670ca-ce93-4e08-8827-31c8e0749fda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.450802] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 813.450802] env[62974]: value = "task-2654383" [ 813.450802] env[62974]: _type = "Task" [ 813.450802] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.460628] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6115a6af-fcaa-4adb-8069-a094328b0ffc tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.234s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.460689] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654383, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.487148] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6036dbe5-cdf9-492f-b2af-25cb9e995e57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.497013] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f947d0-a041-4fbf-bca0-951a74d07048 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.534142] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b440dd22-c19d-4a2f-b31c-4f43509e2015 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.545577] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24124c57-e5f3-4464-91cc-08a83990f109 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.564669] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.597616] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654379, 'name': ReconfigVM_Task, 'duration_secs': 0.566386} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.597955] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 6c7401b6-a69f-4de3-aeb9-26c727d57b76/6c7401b6-a69f-4de3-aeb9-26c727d57b76.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.598665] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2775cbc-a7ef-48d7-bcc8-52f301dbf49f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.607143] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 813.607143] env[62974]: value = "task-2654384" [ 813.607143] env[62974]: _type = "Task" [ 813.607143] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.616367] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654384, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.725532] env[62974]: DEBUG oslo_vmware.api [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.439067} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.725712] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.725900] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 813.726105] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.726332] env[62974]: INFO nova.compute.manager [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 813.726561] env[62974]: DEBUG oslo.service.loopingcall [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.726768] env[62974]: DEBUG nova.compute.manager [-] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 813.726899] env[62974]: DEBUG nova.network.neutron [-] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.963661] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654383, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.965306] env[62974]: DEBUG nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 814.023113] env[62974]: DEBUG nova.compute.manager [req-39ce8241-b91a-4c5a-87ba-80ec0aaa4a88 req-4107be18-bfcc-4d00-b74a-31b867a32ac9 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Received event network-vif-deleted-9cfe7952-9fc7-4153-bdf7-356ebd06114e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 814.023113] env[62974]: INFO nova.compute.manager [req-39ce8241-b91a-4c5a-87ba-80ec0aaa4a88 req-4107be18-bfcc-4d00-b74a-31b867a32ac9 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Neutron deleted interface 9cfe7952-9fc7-4153-bdf7-356ebd06114e; detaching it from the instance and deleting it from the info cache [ 814.023113] env[62974]: DEBUG nova.network.neutron [req-39ce8241-b91a-4c5a-87ba-80ec0aaa4a88 req-4107be18-bfcc-4d00-b74a-31b867a32ac9 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.067492] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 814.118165] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654384, 'name': Rename_Task, 'duration_secs': 0.308392} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.118451] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.118732] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8a0828d-a1fe-4a36-b117-1eea5d3daf66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.126338] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 814.126338] env[62974]: value = "task-2654385" [ 814.126338] env[62974]: _type = "Task" [ 814.126338] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.135825] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654385, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.463683] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654383, 'name': Rename_Task, 'duration_secs': 0.753063} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.464113] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.464411] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fa0b58b-0ca9-4ecf-bd14-17bf8dd6ca6b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.475252] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 814.475252] env[62974]: value = "task-2654386" [ 814.475252] env[62974]: _type = "Task" [ 814.475252] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.487607] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654386, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.492476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.492580] env[62974]: DEBUG nova.network.neutron [-] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.525221] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51f612ad-b62b-4c61-be38-6c94647324c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.538764] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bde8cd-6cac-41ce-ba20-6e32f6d58f27 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.589244] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 814.589634] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.233s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.590192] env[62974]: DEBUG nova.compute.manager [req-39ce8241-b91a-4c5a-87ba-80ec0aaa4a88 req-4107be18-bfcc-4d00-b74a-31b867a32ac9 service nova] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Detach interface failed, port_id=9cfe7952-9fc7-4153-bdf7-356ebd06114e, reason: Instance 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 814.590613] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.088s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.592374] env[62974]: INFO nova.compute.claims [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.596778] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 814.596778] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Cleaning up deleted instances {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 814.638883] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654385, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.920972] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.920972] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.920972] env[62974]: DEBUG nova.compute.manager [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.921968] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc1e3f2-cf18-4120-9edd-1e29a6ced968 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.930269] env[62974]: DEBUG nova.compute.manager [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Stopping instance; current vm_state: active, current task_state: powering-off, current DB 
power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 814.930987] env[62974]: DEBUG nova.objects.instance [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lazy-loading 'flavor' on Instance uuid b31dea29-79d6-4117-bdb5-2d38fb660a53 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 814.988892] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654386, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.995443] env[62974]: INFO nova.compute.manager [-] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Took 1.27 seconds to deallocate network for instance. [ 815.110362] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] There are 37 instances to clean {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 815.110659] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf6e4f04-f5f4-46cb-884b-8014af903a10] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 815.143035] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654385, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.488837] env[62974]: DEBUG oslo_vmware.api [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654386, 'name': PowerOnVM_Task, 'duration_secs': 0.687905} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.491234] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 815.491436] env[62974]: INFO nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Took 7.37 seconds to spawn the instance on the hypervisor. 
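
The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task and PowerOnVM_Task entries above all follow the same shape: a vCenter call returns a task reference and the driver's wait_for_task helper polls it (the recurring "_poll_task ... progress is N%" lines) until it reports success and a duration_secs. The snippet below is a minimal, self-contained sketch of that polling behaviour for illustration only; it is not the oslo.vmware implementation, and the TaskInfo, fetch_task_info and poll_interval names are invented for this example.

# Sketch of the task-polling loop visible in the log above ("progress is 0%",
# "... completed successfully", 'duration_secs'). Not oslo.vmware code; the
# TaskInfo / fetch_task_info / poll_interval names are illustrative only.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # "running", "success" or "error"
    progress: int         # 0-100, as logged by _poll_task
    error: str | None = None

def wait_for_task(fetch_task_info, poll_interval=0.5):
    """Poll a task until it finishes, reporting progress like _poll_task does."""
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        if info.state == "running":
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if info.state == "success":
            print(f"completed successfully, duration_secs={duration:.6f}")
            return duration
        raise RuntimeError(f"task failed after {duration:.2f}s: {info.error}")

# Usage: a fake task that completes on the third poll.
_states = iter([TaskInfo("running", 0), TaskInfo("running", 66), TaskInfo("success", 100)])
wait_for_task(lambda: next(_states), poll_interval=0.0)
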
[ 815.491613] env[62974]: DEBUG nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.492597] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5a5dc0-c64f-495f-8814-1d30c6a52812 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.505567] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.595106] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e346c1-8a67-4cf8-a5e7-2a63adf2ce0d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.606149] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ecca26-a86a-49e0-976c-67cde31c7fd3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.635683] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 65615fd7-c219-4c19-8ecf-11336b616ead] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 815.641233] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338cc559-a258-4bd6-b7cd-cee8ca7fa7e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.650580] env[62974]: DEBUG oslo_vmware.api [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654385, 'name': PowerOnVM_Task, 'duration_secs': 1.110723} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.652282] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 815.652458] env[62974]: INFO nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Took 11.50 seconds to spawn the instance on the hypervisor. 
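
Almost every operation in this section is bracketed by oslo.concurrency bookkeeping ("Acquiring lock ...", "acquired ... :: waited Ns", "released ... :: held Ns"): builds serialise on a per-instance lock (held 90.234s for 0bc05477 above) and resource-tracker updates share the "compute_resources" lock (held 6.233s by the periodic update above). Below is a minimal sketch of that usage pattern, assuming only the public lockutils.lock context manager and lockutils.synchronized decorator; the function bodies and the per-instance lock name are illustrative, not Nova's actual code.

# Sketch (not Nova source) of the locking pattern behind the
# "Acquiring lock ... / acquired :: waited Ns / released :: held Ns" entries.
from oslo_concurrency import lockutils

def spawn(instance_uuid):
    print(f"spawning {instance_uuid}")   # stand-in for the real build steps

def build_and_run_instance(instance_uuid):
    # Per-instance lock: a second request for the same instance blocks here,
    # which is what the "waited N.NNNs" figures in the log measure.
    with lockutils.lock(instance_uuid):
        spawn(instance_uuid)

@lockutils.synchronized("compute_resources")
def update_available_resource():
    # Resource-tracker work shares one lock, so a long periodic update
    # (6.233s above) delays instance claims queued behind it.
    pass

build_and_run_instance("3df97cea-5a6e-4d7a-b2f3-e02213816e24")
update_available_resource()
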
[ 815.652815] env[62974]: DEBUG nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.653459] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11f1b99-0125-42db-8661-8b1170fc89e0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.656733] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521c2ab6-15e0-4cd1-b75a-0e3fa40a7721 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.675770] env[62974]: DEBUG nova.compute.provider_tree [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.938697] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 815.939151] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-795f4b65-528b-4782-a5e9-499d35d1ee86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.948593] env[62974]: DEBUG oslo_vmware.api [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 815.948593] env[62974]: value = "task-2654387" [ 815.948593] env[62974]: _type = "Task" [ 815.948593] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.960504] env[62974]: DEBUG oslo_vmware.api [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654387, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.014461] env[62974]: INFO nova.compute.manager [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Took 39.09 seconds to build instance. 
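
The repeated "Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18" entries above carry the numbers that bound scheduling on this node: VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400. Under the usual placement capacity formula, capacity = (total - reserved) * allocation_ratio; the short worked example below just applies that arithmetic to the logged values and is not code taken from Nova or placement.

# Worked example: capacity = (total - reserved) * allocation_ratio, using the
# inventory reported in the "Inventory has not changed ..." entries above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}")

# VCPU: capacity 192          (the log shows 25 vcpus currently allocated)
# MEMORY_MB: capacity 196078  (the log shows used_ram=5440MB)
# DISK_GB: capacity 400
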
[ 816.144213] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 4967d5be-6cd4-4f23-aca4-d9ae11112369] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 816.181880] env[62974]: DEBUG nova.scheduler.client.report [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.187393] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d33f35-f5e6-7e8d-3563-614fc3c3c994/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 816.190167] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73176c3-319e-4455-abea-4d53b8137089 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.194127] env[62974]: INFO nova.compute.manager [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Took 43.44 seconds to build instance. [ 816.199601] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d33f35-f5e6-7e8d-3563-614fc3c3c994/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 816.199768] env[62974]: ERROR oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d33f35-f5e6-7e8d-3563-614fc3c3c994/disk-0.vmdk due to incomplete transfer. [ 816.200237] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-86a064b1-e9bd-4fc3-9fa5-87edf25ad080 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.213616] env[62974]: DEBUG oslo_vmware.rw_handles [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d33f35-f5e6-7e8d-3563-614fc3c3c994/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 816.213916] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Uploaded image a51f6776-a571-4d03-938a-5a97a88c6d55 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 816.216767] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 816.217280] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1059910e-c229-4fef-adf4-626cfb48f78a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.225389] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 816.225389] env[62974]: value = "task-2654388" [ 816.225389] env[62974]: _type = "Task" [ 816.225389] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.236259] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654388, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.458761] env[62974]: DEBUG oslo_vmware.api [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654387, 'name': PowerOffVM_Task, 'duration_secs': 0.392527} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.459050] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 816.459254] env[62974]: DEBUG nova.compute.manager [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 816.460055] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f94c61-8e3c-4ad3-a854-d58fc1dbf9ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.517750] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9fa7242d-71f3-4582-a1f0-f370acf84ab4 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "6e8f07c2-60da-4bad-a7af-8c83294e232f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.302s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.558312] env[62974]: INFO nova.compute.manager [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Rebuilding instance [ 816.599208] env[62974]: DEBUG nova.compute.manager [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 816.600193] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76814a57-7f8c-46bd-bff3-0f1495767605 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.647406] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: bcacc508-b910-4144-bf0b-454b0928ca71] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 816.695051] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.104s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.695589] env[62974]: DEBUG nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.698174] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.730s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.700137] env[62974]: INFO nova.compute.claims [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.705054] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d66325-d5b2-434f-aefd-4ba3537b6efc tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.554s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.737389] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654388, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.973724] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d10b05e-bc48-4be1-a008-3fa940293437 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.151852] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d941a678-1b67-4e0f-8806-e6682ef21774] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 817.204977] env[62974]: DEBUG nova.compute.utils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 817.209693] env[62974]: DEBUG nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 817.209879] env[62974]: DEBUG nova.network.neutron [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.240509] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654388, 'name': Destroy_Task, 'duration_secs': 0.95618} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.241236] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Destroyed the VM [ 817.241566] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 817.241877] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4e832849-d4b2-4c97-970e-adebc22b4b13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.251028] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 817.251028] env[62974]: value = "task-2654389" [ 817.251028] env[62974]: _type = "Task" [ 817.251028] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.260888] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654389, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.262518] env[62974]: DEBUG nova.policy [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e07ae60010640d88de0d3b716914186', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd914830aaf454e26b77cbb46722764ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.378142] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.379831] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.379831] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.379831] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.379831] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.382903] env[62974]: INFO nova.compute.manager [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Terminating instance [ 817.539390] env[62974]: DEBUG nova.network.neutron [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] 
[instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Successfully created port: e66d1ea1-70df-427f-8578-45c959a08ad6 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.614853] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.615273] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfe5c8a6-ff4b-4298-abe0-9c43d38e0a18 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.624773] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 817.624773] env[62974]: value = "task-2654390" [ 817.624773] env[62974]: _type = "Task" [ 817.624773] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.638134] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.655733] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 669cd72c-556f-40b6-8bc2-f50a125c182a] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 817.710968] env[62974]: DEBUG nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.715727] env[62974]: DEBUG nova.objects.instance [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lazy-loading 'flavor' on Instance uuid b31dea29-79d6-4117-bdb5-2d38fb660a53 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.770487] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654389, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.887122] env[62974]: DEBUG nova.compute.manager [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 817.887454] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.888699] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4436941-af80-46fa-ae8a-5062c64ff9da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.901613] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.901613] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2a688c2-e25a-4c41-97dd-2fee99bcd3c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.910201] env[62974]: DEBUG oslo_vmware.api [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 817.910201] env[62974]: value = "task-2654391" [ 817.910201] env[62974]: _type = "Task" [ 817.910201] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.923489] env[62974]: DEBUG oslo_vmware.api [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654391, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.138985] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654390, 'name': PowerOffVM_Task, 'duration_secs': 0.198768} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.139294] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 818.139532] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 818.140362] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c223e7-11e1-453a-8dd0-bc5c6e57abed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.149148] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 818.149411] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5419a95-cd15-496c-b146-ea5f5f522dba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.160515] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: ea2227ff-f694-4baa-af17-dc50338d8fa6] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 818.181865] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 818.182175] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 818.182385] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Deleting the datastore file [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 818.182652] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13d870df-5ab7-4f89-9c1f-b51e6411a2b3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.190800] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 
818.190800] env[62974]: value = "task-2654393" [ 818.190800] env[62974]: _type = "Task" [ 818.190800] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.204397] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.224977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.225173] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquired lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.225340] env[62974]: DEBUG nova.network.neutron [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.225516] env[62974]: DEBUG nova.objects.instance [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lazy-loading 'info_cache' on Instance uuid b31dea29-79d6-4117-bdb5-2d38fb660a53 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.266744] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654389, 'name': RemoveSnapshot_Task, 'duration_secs': 0.806797} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.267079] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 818.267370] env[62974]: DEBUG nova.compute.manager [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.268167] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d5029b-3e86-464c-abb1-d5b990614b73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.326175] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029ccd3c-106d-44f0-a5b1-4ae406681f12 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.336603] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a536043-028a-49ff-9415-6f511fbbeecf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.365553] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbaaffc-86e5-4822-913e-df8023aa5d08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.375322] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adc4785-06e0-4b79-83a7-d5e0c03e71a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.391925] env[62974]: DEBUG nova.compute.provider_tree [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.420452] env[62974]: DEBUG oslo_vmware.api [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654391, 'name': PowerOffVM_Task, 'duration_secs': 0.250996} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.420683] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 818.420988] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 818.421332] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9f06c1a-fd0e-41a3-b3b3-974fdefe80be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.508041] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 818.508041] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 818.509022] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Deleting the datastore file [datastore1] 6c7401b6-a69f-4de3-aeb9-26c727d57b76 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 818.509022] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80e1019d-7b42-474b-9043-6093fa095042 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.517028] env[62974]: DEBUG oslo_vmware.api [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for the task: (returnval){ [ 818.517028] env[62974]: value = "task-2654395" [ 818.517028] env[62974]: _type = "Task" [ 818.517028] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.525892] env[62974]: DEBUG oslo_vmware.api [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654395, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.665019] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 2ebb3385-4177-4506-a4b0-52b53405cf49] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 818.702632] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.448187} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.702632] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 818.702632] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 818.702632] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 818.731077] env[62974]: DEBUG nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.734459] env[62974]: DEBUG nova.objects.base [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 818.761921] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.762192] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.762349] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.762531] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.762676] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.762853] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.763079] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.763241] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.763407] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.763567] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.763741] env[62974]: DEBUG nova.virt.hardware [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.764639] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f50e7a0-bd22-4f36-8e7e-80cf515a6dec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.773614] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce3f7e6-0566-4d18-b8ec-bfef089dfffc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.783013] env[62974]: INFO nova.compute.manager [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Shelve offloading [ 818.895501] env[62974]: DEBUG nova.scheduler.client.report [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.921938] env[62974]: DEBUG nova.compute.manager [req-ed3c8b81-fe5d-40ee-ac4b-82986ec2fba2 req-1910762f-2da0-4d9c-a0a2-f244d2bdfe4b service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Received event network-vif-plugged-e66d1ea1-70df-427f-8578-45c959a08ad6 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 818.922104] env[62974]: DEBUG oslo_concurrency.lockutils [req-ed3c8b81-fe5d-40ee-ac4b-82986ec2fba2 req-1910762f-2da0-4d9c-a0a2-f244d2bdfe4b service nova] Acquiring lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.922374] env[62974]: DEBUG oslo_concurrency.lockutils [req-ed3c8b81-fe5d-40ee-ac4b-82986ec2fba2 req-1910762f-2da0-4d9c-a0a2-f244d2bdfe4b 
service nova] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.922469] env[62974]: DEBUG oslo_concurrency.lockutils [req-ed3c8b81-fe5d-40ee-ac4b-82986ec2fba2 req-1910762f-2da0-4d9c-a0a2-f244d2bdfe4b service nova] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.922675] env[62974]: DEBUG nova.compute.manager [req-ed3c8b81-fe5d-40ee-ac4b-82986ec2fba2 req-1910762f-2da0-4d9c-a0a2-f244d2bdfe4b service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] No waiting events found dispatching network-vif-plugged-e66d1ea1-70df-427f-8578-45c959a08ad6 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.922853] env[62974]: WARNING nova.compute.manager [req-ed3c8b81-fe5d-40ee-ac4b-82986ec2fba2 req-1910762f-2da0-4d9c-a0a2-f244d2bdfe4b service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Received unexpected event network-vif-plugged-e66d1ea1-70df-427f-8578-45c959a08ad6 for instance with vm_state building and task_state spawning. [ 819.012885] env[62974]: DEBUG nova.network.neutron [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Successfully updated port: e66d1ea1-70df-427f-8578-45c959a08ad6 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.027549] env[62974]: DEBUG oslo_vmware.api [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654395, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.168442] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 69597c3f-ccb2-474d-bb7c-629c5da0b456] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 819.286315] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.286643] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-631e47a8-c0b6-4a77-ac6d-1fb853efb05a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.294975] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 819.294975] env[62974]: value = "task-2654396" [ 819.294975] env[62974]: _type = "Task" [ 819.294975] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.306811] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 819.307048] env[62974]: DEBUG nova.compute.manager [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.307821] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebe14a8-6d16-451c-975a-d4fc39703898 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.319771] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.319886] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.320038] env[62974]: DEBUG nova.network.neutron [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.400704] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.401283] env[62974]: DEBUG nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 819.404107] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.219s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.405939] env[62974]: INFO nova.compute.claims [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 819.449062] env[62974]: DEBUG nova.network.neutron [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Updating instance_info_cache with network_info: [{"id": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "address": "fa:16:3e:7f:18:40", "network": {"id": "8d0b87df-eaa2-413e-9805-f9b881c05eaf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1479208503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5fcbb1d7aa1440cb5c5fbe27662a39e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabd131b8-9d", "ovs_interfaceid": "abd131b8-9d9e-4230-b1d9-19c7a25bb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.516288] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "refresh_cache-14523914-68ab-4d39-8eb8-6a786ddcb4dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.516485] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "refresh_cache-14523914-68ab-4d39-8eb8-6a786ddcb4dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.516689] env[62974]: DEBUG nova.network.neutron [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.527432] env[62974]: DEBUG 
oslo_vmware.api [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Task: {'id': task-2654395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.597723} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.528176] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 819.528360] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 819.528581] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 819.528776] env[62974]: INFO nova.compute.manager [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Took 1.64 seconds to destroy the instance on the hypervisor. [ 819.529080] env[62974]: DEBUG oslo.service.loopingcall [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.529278] env[62974]: DEBUG nova.compute.manager [-] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 819.529371] env[62974]: DEBUG nova.network.neutron [-] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 819.672892] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 28c247f6-3179-425d-ae1c-615151b1e2ff] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 819.743622] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 819.743884] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 819.744055] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 819.744243] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 819.744386] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 819.744532] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 819.744738] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce 
tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 819.744895] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 819.745195] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 819.745391] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 819.745564] env[62974]: DEBUG nova.virt.hardware [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 819.746483] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8c9e1e-8bc9-4884-ac23-de9f1573f848 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.756383] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ee4cb0-263d-48f9-849a-4f31e49999fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.772884] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Instance VIF info [] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.778758] env[62974]: DEBUG oslo.service.loopingcall [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.779299] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 819.779530] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7db5d271-ea26-4c03-98c4-c79dbaa76e54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.797678] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.797678] env[62974]: value = "task-2654397" [ 819.797678] env[62974]: _type = "Task" [ 819.797678] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.806667] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654397, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.910637] env[62974]: DEBUG nova.compute.utils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 819.917642] env[62974]: DEBUG nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.917880] env[62974]: DEBUG nova.network.neutron [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.951982] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Releasing lock "refresh_cache-b31dea29-79d6-4117-bdb5-2d38fb660a53" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.985543] env[62974]: DEBUG nova.policy [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7337dc651b624b41a4dae92e0603c534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ecf0c1b56e34a6cbc2d073089e37efc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 820.076033] env[62974]: DEBUG nova.network.neutron [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Instance cache missing network 
info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.121691] env[62974]: DEBUG nova.network.neutron [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": "07b0aa8b-b38d-489b-9998-6efe6126083f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.176998] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6e81e765-4fe3-42a7-a0ba-9860be897a70] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 820.227481] env[62974]: DEBUG nova.network.neutron [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Updating instance_info_cache with network_info: [{"id": "e66d1ea1-70df-427f-8578-45c959a08ad6", "address": "fa:16:3e:2c:18:bb", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape66d1ea1-70", "ovs_interfaceid": "e66d1ea1-70df-427f-8578-45c959a08ad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.262305] env[62974]: DEBUG nova.network.neutron 
[-] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.309745] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654397, 'name': CreateVM_Task, 'duration_secs': 0.329078} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.309917] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.310346] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.310505] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.310811] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 820.311074] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57287568-cc7c-4cc0-bcbf-448b9095057d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.316085] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 820.316085] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6703e-a555-b284-c9b6-c436f2acc22b" [ 820.316085] env[62974]: _type = "Task" [ 820.316085] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.324582] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6703e-a555-b284-c9b6-c436f2acc22b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.373504] env[62974]: DEBUG nova.network.neutron [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Successfully created port: 8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.417021] env[62974]: DEBUG nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.627387] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.683202] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 5bc466fb-eebb-40b1-ba09-614a25782ecd] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 820.730083] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "refresh_cache-14523914-68ab-4d39-8eb8-6a786ddcb4dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.730412] env[62974]: DEBUG nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Instance network_info: |[{"id": "e66d1ea1-70df-427f-8578-45c959a08ad6", "address": "fa:16:3e:2c:18:bb", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape66d1ea1-70", "ovs_interfaceid": "e66d1ea1-70df-427f-8578-45c959a08ad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.733628] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 
tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:18:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06cc7c49-c46c-4c1e-bf51-77e9ea802c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e66d1ea1-70df-427f-8578-45c959a08ad6', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.741041] env[62974]: DEBUG oslo.service.loopingcall [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.742133] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.742413] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e93a76ae-aa3f-46d7-9dca-8bc2ce454d65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.765478] env[62974]: INFO nova.compute.manager [-] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Took 1.24 seconds to deallocate network for instance. [ 820.774485] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.774485] env[62974]: value = "task-2654398" [ 820.774485] env[62974]: _type = "Task" [ 820.774485] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.785780] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654398, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.843249] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6703e-a555-b284-c9b6-c436f2acc22b, 'name': SearchDatastore_Task, 'duration_secs': 0.010256} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.843673] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.844017] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.844346] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.844530] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.844720] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.848706] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d9c32de-a950-45a3-9f4b-ecce9f24103f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.861355] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.861664] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.866285] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c2233fb-7d6d-4913-80d2-1f056912b011 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.876216] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 820.876216] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522f8b0b-cacf-51e5-39ad-b995b323f837" [ 820.876216] env[62974]: _type = "Task" [ 820.876216] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.893155] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522f8b0b-cacf-51e5-39ad-b995b323f837, 'name': SearchDatastore_Task, 'duration_secs': 0.014447} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.897377] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-740b4869-c8e3-4165-b2ad-473b05aaaed2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.903682] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 820.903682] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6c2dd-8af9-693c-6689-2eea8c199a07" [ 820.903682] env[62974]: _type = "Task" [ 820.903682] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.912472] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6c2dd-8af9-693c-6689-2eea8c199a07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.940739] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.942022] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44288a5-0cee-4548-8478-d7a3310a82a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.955624] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.957110] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7adcfe2b-00d5-42e5-b864-971b4d37dcf7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.960751] env[62974]: DEBUG nova.compute.manager [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Received event network-changed-e66d1ea1-70df-427f-8578-45c959a08ad6 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 820.960751] env[62974]: DEBUG nova.compute.manager [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Refreshing instance network info cache due to event network-changed-e66d1ea1-70df-427f-8578-45c959a08ad6. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 820.960893] env[62974]: DEBUG oslo_concurrency.lockutils [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] Acquiring lock "refresh_cache-14523914-68ab-4d39-8eb8-6a786ddcb4dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.961058] env[62974]: DEBUG oslo_concurrency.lockutils [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] Acquired lock "refresh_cache-14523914-68ab-4d39-8eb8-6a786ddcb4dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.961233] env[62974]: DEBUG nova.network.neutron [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Refreshing network info cache for port e66d1ea1-70df-427f-8578-45c959a08ad6 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.962663] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.966123] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d28ad668-ba37-4d27-9d46-12cff88afba6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.975288] env[62974]: DEBUG oslo_vmware.api [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 820.975288] env[62974]: value = "task-2654399" [ 820.975288] env[62974]: _type = "Task" [ 820.975288] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.989395] env[62974]: DEBUG oslo_vmware.api [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654399, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.993198] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4587a3e8-c4ae-4f34-b32a-6275d1e9b9a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.001212] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0bd9d6-1f5c-47d9-8dc0-4524eac867b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.036752] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1303a327-795f-4483-92a9-4bb9f1b0c4ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.041267] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 821.041553] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 821.042186] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleting the datastore file [datastore1] 366b5816-a847-48d1-ad03-5758e473a9d0 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.045784] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6afa4efe-2caa-4f86-8672-e30d69d0a54c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.048401] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3025332-c53a-43a5-be28-46580837da01 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.055016] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 821.055016] env[62974]: value = "task-2654401" [ 821.055016] env[62974]: _type = "Task" [ 821.055016] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.069275] env[62974]: DEBUG nova.compute.provider_tree [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.078031] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654401, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.188689] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 605b1e4c-9bd7-41cd-b5fe-05dd5d7af245] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 821.276134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.285806] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654398, 'name': CreateVM_Task, 'duration_secs': 0.43347} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.286090] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.286687] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.286852] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.287224] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 821.287479] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d3f2945-64d0-4754-8f40-8f1bd592586b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.293072] env[62974]: DEBUG oslo_vmware.api [None 
req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 821.293072] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521e4c00-5c34-7078-f651-dd86ca129b3b" [ 821.293072] env[62974]: _type = "Task" [ 821.293072] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.301408] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521e4c00-5c34-7078-f651-dd86ca129b3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.416363] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b6c2dd-8af9-693c-6689-2eea8c199a07, 'name': SearchDatastore_Task, 'duration_secs': 0.008959} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.416692] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.416946] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.417240] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-174950ae-512f-459e-bab1-76d7e04d3f1f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.426058] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 821.426058] env[62974]: value = "task-2654402" [ 821.426058] env[62974]: _type = "Task" [ 821.426058] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.430014] env[62974]: DEBUG nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 821.438675] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654402, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.457184] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.457430] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.457584] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.457761] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.457904] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.458063] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.458274] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
821.458430] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.458613] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.458837] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.458970] env[62974]: DEBUG nova.virt.hardware [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.459862] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdff5b3-0ede-400e-bf53-adcf42dd2fdf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.470173] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575f0097-ae5b-4ead-83c4-431fbcbf0e44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.493876] env[62974]: DEBUG oslo_vmware.api [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654399, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.568864] env[62974]: DEBUG oslo_vmware.api [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213165} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.569185] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 821.569459] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 821.570291] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 821.573146] env[62974]: DEBUG nova.scheduler.client.report [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.596670] env[62974]: INFO nova.scheduler.client.report [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleted allocations for instance 366b5816-a847-48d1-ad03-5758e473a9d0 [ 821.692099] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 79d0e6d6-b89a-46a3-b2ae-d6c04c92ede7] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 821.703668] env[62974]: DEBUG nova.network.neutron [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Updated VIF entry in instance network info cache for port e66d1ea1-70df-427f-8578-45c959a08ad6. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.704020] env[62974]: DEBUG nova.network.neutron [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Updating instance_info_cache with network_info: [{"id": "e66d1ea1-70df-427f-8578-45c959a08ad6", "address": "fa:16:3e:2c:18:bb", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape66d1ea1-70", "ovs_interfaceid": "e66d1ea1-70df-427f-8578-45c959a08ad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.807271] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521e4c00-5c34-7078-f651-dd86ca129b3b, 'name': SearchDatastore_Task, 'duration_secs': 0.010791} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.807795] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.808160] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.808472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.808731] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.808956] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.809285] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e189658-dabf-4a18-9b91-a2a3a79e1c89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.827209] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.827345] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.828224] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac262b91-ea3e-4fe7-a63a-5ab5fad2f4fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.836665] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 821.836665] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a4de43-aad4-66d2-2494-399d6fb3cb9a" [ 821.836665] env[62974]: _type = "Task" [ 821.836665] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.847545] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a4de43-aad4-66d2-2494-399d6fb3cb9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.938901] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654402, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.992269] env[62974]: DEBUG oslo_vmware.api [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654399, 'name': PowerOnVM_Task, 'duration_secs': 0.611248} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.992756] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.993111] env[62974]: DEBUG nova.compute.manager [None req-6735cda0-ce66-4b47-a5a7-c661e3e59c02 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.995893] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c267b0-a216-423e-825a-ab9b14099c03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.073595] env[62974]: DEBUG nova.compute.manager [req-34a2877c-b2a2-434d-a59f-3edfd91ed613 req-a643aa6c-67de-4a33-93a7-15b196bf1e02 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Received event network-vif-plugged-8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.074152] env[62974]: DEBUG oslo_concurrency.lockutils [req-34a2877c-b2a2-434d-a59f-3edfd91ed613 req-a643aa6c-67de-4a33-93a7-15b196bf1e02 service nova] Acquiring lock "55229db9-9442-4973-a1f2-7762227167a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.074587] env[62974]: DEBUG oslo_concurrency.lockutils [req-34a2877c-b2a2-434d-a59f-3edfd91ed613 req-a643aa6c-67de-4a33-93a7-15b196bf1e02 service nova] Lock "55229db9-9442-4973-a1f2-7762227167a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.074997] env[62974]: DEBUG oslo_concurrency.lockutils [req-34a2877c-b2a2-434d-a59f-3edfd91ed613 req-a643aa6c-67de-4a33-93a7-15b196bf1e02 service nova] Lock "55229db9-9442-4973-a1f2-7762227167a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.075377] env[62974]: DEBUG nova.compute.manager [req-34a2877c-b2a2-434d-a59f-3edfd91ed613 req-a643aa6c-67de-4a33-93a7-15b196bf1e02 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] No waiting events found dispatching network-vif-plugged-8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.076482] env[62974]: WARNING nova.compute.manager [req-34a2877c-b2a2-434d-a59f-3edfd91ed613 req-a643aa6c-67de-4a33-93a7-15b196bf1e02 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Received unexpected event network-vif-plugged-8c1e40ea-8afa-424a-9c2d-65f7e1179366 for instance with vm_state building and task_state spawning. 
[ 822.079320] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.080540] env[62974]: DEBUG nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 822.085854] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.601s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.085854] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.087981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 34.583s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.102343] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.112197] env[62974]: INFO nova.scheduler.client.report [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted allocations for instance 3bcbcf35-294e-4d58-b002-cb84db4316d5 [ 822.180329] env[62974]: DEBUG nova.network.neutron [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Successfully updated port: 8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.197525] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: a7a014b9-10e1-45a0-85da-4754051e8d82] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 822.206900] env[62974]: DEBUG oslo_concurrency.lockutils 
[req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] Releasing lock "refresh_cache-14523914-68ab-4d39-8eb8-6a786ddcb4dc" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.207385] env[62974]: DEBUG nova.compute.manager [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Received event network-vif-deleted-ab4cf036-7af9-44a4-aef7-4da58ac03efa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.209058] env[62974]: DEBUG nova.compute.manager [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received event network-vif-unplugged-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.209058] env[62974]: DEBUG oslo_concurrency.lockutils [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.209058] env[62974]: DEBUG oslo_concurrency.lockutils [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.209058] env[62974]: DEBUG oslo_concurrency.lockutils [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.209058] env[62974]: DEBUG nova.compute.manager [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] No waiting events found dispatching network-vif-unplugged-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.209337] env[62974]: WARNING nova.compute.manager [req-b122da87-65be-4dca-8b72-9c02a6c2eef9 req-3fc47235-d80f-4989-a95e-f0003bb4faeb service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received unexpected event network-vif-unplugged-07b0aa8b-b38d-489b-9998-6efe6126083f for instance with vm_state shelved and task_state shelving_offloading. [ 822.349497] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a4de43-aad4-66d2-2494-399d6fb3cb9a, 'name': SearchDatastore_Task, 'duration_secs': 0.057372} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.350318] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d7b43b1-6b05-44d6-b7f2-4a6e873cfdfd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.357227] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 822.357227] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520111b0-c168-9be8-6b78-7713e665e00a" [ 822.357227] env[62974]: _type = "Task" [ 822.357227] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.365366] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520111b0-c168-9be8-6b78-7713e665e00a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.437485] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654402, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.677266} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.437485] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.437721] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.437976] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32ea58b9-cc7c-4c4a-8070-1cd54647b22b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.446052] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 822.446052] env[62974]: value = "task-2654403" [ 822.446052] env[62974]: _type = "Task" [ 822.446052] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.454018] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654403, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.585882] env[62974]: DEBUG nova.compute.utils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 822.586788] env[62974]: DEBUG nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 822.586953] env[62974]: DEBUG nova.network.neutron [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 822.592239] env[62974]: INFO nova.compute.claims [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.621154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ab1e9e6-bb55-4320-b221-dce1728aed1b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "3bcbcf35-294e-4d58-b002-cb84db4316d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.849s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.649709] env[62974]: DEBUG nova.policy [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a642fe375c743b7958ddeb1490a8032', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e0a57dfe83843708e333b70e0cc2bc4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 822.682616] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.682827] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.682963] env[62974]: DEBUG nova.network.neutron [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.699782] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 30fcd64c-4570-454b-a7e5-3246c92d90fc] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 822.870284] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520111b0-c168-9be8-6b78-7713e665e00a, 'name': SearchDatastore_Task, 'duration_secs': 0.009818} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.870772] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.871248] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 14523914-68ab-4d39-8eb8-6a786ddcb4dc/14523914-68ab-4d39-8eb8-6a786ddcb4dc.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.871794] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcaab671-dd35-4509-9931-f071a8cc5688 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.882241] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 822.882241] env[62974]: value = "task-2654404" [ 822.882241] env[62974]: _type = "Task" [ 822.882241] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.892863] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654404, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.960536] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654403, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.220434} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.960970] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.961997] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ae27fa-88b1-49cd-bdbb-f25ab219ae0b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.991019] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.991019] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-153f58c4-285c-4327-8ac7-474f9969e7a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.015023] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 823.015023] env[62974]: value = "task-2654405" [ 823.015023] env[62974]: _type = "Task" [ 823.015023] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.022165] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.090586] env[62974]: DEBUG nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 823.097986] env[62974]: INFO nova.compute.resource_tracker [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating resource usage from migration 53944e14-f97c-4750-952b-d31a40fddfbe [ 823.204175] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 22a0a34a-c46b-4246-9a80-3540550bd793] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 823.250279] env[62974]: DEBUG nova.network.neutron [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.393126] env[62974]: DEBUG nova.network.neutron [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Successfully created port: 4c5397a0-f933-4f39-911d-525d8d7e5aac {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.399437] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654404, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486346} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.402662] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 14523914-68ab-4d39-8eb8-6a786ddcb4dc/14523914-68ab-4d39-8eb8-6a786ddcb4dc.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.402891] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.404073] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3289973-7103-43cc-be8b-5430ca3e24c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.414538] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 823.414538] env[62974]: value = "task-2654406" [ 823.414538] env[62974]: _type = "Task" [ 823.414538] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.431031] env[62974]: DEBUG nova.network.neutron [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.435517] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654406, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.449776] env[62974]: DEBUG nova.compute.manager [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received event network-changed-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 823.450032] env[62974]: DEBUG nova.compute.manager [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Refreshing instance network info cache due to event network-changed-07b0aa8b-b38d-489b-9998-6efe6126083f. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 823.450424] env[62974]: DEBUG oslo_concurrency.lockutils [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] Acquiring lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.450605] env[62974]: DEBUG oslo_concurrency.lockutils [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] Acquired lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.450788] env[62974]: DEBUG nova.network.neutron [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Refreshing network info cache for port 07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.527316] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654405, 'name': ReconfigVM_Task, 'duration_secs': 0.461497} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.533018] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f/6e8f07c2-60da-4bad-a7af-8c83294e232f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.533018] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8d7de73-a79e-4978-8d9b-d9e8da17f156 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.539969] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 823.539969] env[62974]: value = "task-2654407" [ 823.539969] env[62974]: _type = "Task" [ 823.539969] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.554715] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654407, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.708298] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b5fcf8aa-2e02-4b25-a186-aa0f1fbb3e70] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 823.745234] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441ef1d2-6057-41f4-a504-abf7ef57b8e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.755320] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443957b5-4d0d-4b68-a618-2bbb8c9ad89a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.792753] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9e1071-6893-4572-8e36-f71fefd5810f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.801453] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8084c1-dc5f-4286-8ef0-086913256d5b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.815706] env[62974]: DEBUG nova.compute.provider_tree [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.930279] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654406, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076126} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.930590] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.931385] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1857d95c-bb7b-4a96-b0fc-ca5383448124 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.936565] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.936898] env[62974]: DEBUG nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Instance network_info: |[{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.947143] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:f3:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7f41333-42ee-47f3-936c-d6701ab786d2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c1e40ea-8afa-424a-9c2d-65f7e1179366', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.954667] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating folder: Project 
(5ecf0c1b56e34a6cbc2d073089e37efc). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 823.964054] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 14523914-68ab-4d39-8eb8-6a786ddcb4dc/14523914-68ab-4d39-8eb8-6a786ddcb4dc.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.967082] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f232abd-dc0b-4648-9cce-abf00db64f69 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.968710] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8208df73-a0f4-4bf3-88ed-5558bfa416bc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.989569] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 823.989569] env[62974]: value = "task-2654409" [ 823.989569] env[62974]: _type = "Task" [ 823.989569] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.993925] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Created folder: Project (5ecf0c1b56e34a6cbc2d073089e37efc) in parent group-v535199. [ 823.994139] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating folder: Instances. Parent ref: group-v535385. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 823.994726] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db44e90d-44c0-4dc8-9315-1a1ea1119e17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.000132] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654409, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.004180] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Created folder: Instances in parent group-v535385. [ 824.004416] env[62974]: DEBUG oslo.service.loopingcall [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.004608] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.004817] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fdcfa33-b5b7-4b8c-b4d9-113bfb109743 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.027141] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.027141] env[62974]: value = "task-2654411" [ 824.027141] env[62974]: _type = "Task" [ 824.027141] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.035489] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654411, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.051039] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654407, 'name': Rename_Task, 'duration_secs': 0.154604} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.051338] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.051580] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3649b97-c44a-439d-bc49-69e549906d8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.058950] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 824.058950] env[62974]: value = "task-2654412" [ 824.058950] env[62974]: _type = "Task" [ 824.058950] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.068471] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654412, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.101193] env[62974]: DEBUG nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 824.147032] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 824.147236] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 824.147402] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 824.147584] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 824.147728] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 824.147872] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 824.148088] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 824.148691] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 824.148691] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 824.148691] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 824.148870] env[62974]: DEBUG nova.virt.hardware [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 824.149730] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43ca925-6ad2-43a2-941f-5325edab6a76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.164866] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60fd6dc-a8e2-4270-95a5-48b8011cf993 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.185253] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74d12a07-e18b-4886-a5ad-3b5da65dea4e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "2392fb5b-a482-49c1-9668-283ba53973de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.185554] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74d12a07-e18b-4886-a5ad-3b5da65dea4e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "2392fb5b-a482-49c1-9668-283ba53973de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.215025] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 41f20cb7-c9f9-4201-ae16-4f977dae26cf] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 824.237584] env[62974]: DEBUG nova.network.neutron [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updated VIF entry in instance network info cache for port 07b0aa8b-b38d-489b-9998-6efe6126083f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.237935] env[62974]: DEBUG nova.network.neutron [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.305418] env[62974]: DEBUG nova.compute.manager [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Received event network-changed-8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 824.305418] env[62974]: DEBUG nova.compute.manager [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Refreshing instance network info cache due to event network-changed-8c1e40ea-8afa-424a-9c2d-65f7e1179366. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 824.305418] env[62974]: DEBUG oslo_concurrency.lockutils [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] Acquiring lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.305418] env[62974]: DEBUG oslo_concurrency.lockutils [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] Acquired lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.305418] env[62974]: DEBUG nova.network.neutron [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Refreshing network info cache for port 8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.319190] env[62974]: DEBUG nova.scheduler.client.report [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.500822] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654409, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.537855] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654411, 'name': CreateVM_Task, 'duration_secs': 0.431731} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.537991] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 824.538688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.538908] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.539251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 824.539506] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38ab58b3-a7fd-4a5c-84c7-790a9c94971d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.544620] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 824.544620] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521b2793-897e-d510-4881-344bcecf409d" [ 824.544620] env[62974]: _type = "Task" [ 824.544620] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.555167] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521b2793-897e-d510-4881-344bcecf409d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.567424] env[62974]: DEBUG oslo_vmware.api [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654412, 'name': PowerOnVM_Task, 'duration_secs': 0.502562} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.567656] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.567874] env[62974]: DEBUG nova.compute.manager [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.568665] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23748ed-aa2d-423f-beb0-c75d90a5a518 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.599445] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.688679] env[62974]: DEBUG nova.compute.manager [None req-74d12a07-e18b-4886-a5ad-3b5da65dea4e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 2392fb5b-a482-49c1-9668-283ba53973de] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 824.719460] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 05742180-08db-45db-9ee0-e359aa8af2f0] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 824.742178] env[62974]: DEBUG oslo_concurrency.lockutils [req-74a6b3ca-a223-49bb-a0b3-2052c8065985 req-b8d5ee8e-33a1-4da8-b205-2aa6a9300780 service nova] Releasing lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.823832] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.736s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.824134] env[62974]: INFO nova.compute.manager [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Migrating [ 824.835417] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.891s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.835645] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.837771] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.650s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.839346] env[62974]: INFO nova.compute.claims [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.872595] env[62974]: INFO nova.scheduler.client.report [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Deleted allocations for instance 0c2642d5-85fe-4db5-9891-025c88ca8c7c [ 825.001926] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654409, 'name': ReconfigVM_Task, 'duration_secs': 0.895843} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.002962] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 14523914-68ab-4d39-8eb8-6a786ddcb4dc/14523914-68ab-4d39-8eb8-6a786ddcb4dc.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.005463] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8eda23ec-559b-4ff8-8fe8-c71bd6da97fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.014380] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 825.014380] env[62974]: value = "task-2654413" [ 825.014380] env[62974]: _type = "Task" [ 825.014380] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.024776] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654413, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.056664] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521b2793-897e-d510-4881-344bcecf409d, 'name': SearchDatastore_Task, 'duration_secs': 0.009848} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.057556] env[62974]: DEBUG nova.network.neutron [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updated VIF entry in instance network info cache for port 8c1e40ea-8afa-424a-9c2d-65f7e1179366. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 825.057891] env[62974]: DEBUG nova.network.neutron [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.059147] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.059379] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.059632] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.059791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.059971] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.060682] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e59f372f-9df9-4dea-9853-1508ca68eb45 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.071362] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.071533] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.072579] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7efd9e5e-6c64-4e60-a532-1fac34e502af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.085025] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 825.085025] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52991f0a-ec79-5bd3-d813-19bf53761be5" [ 825.085025] env[62974]: _type = "Task" [ 825.085025] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.088441] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.098598] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52991f0a-ec79-5bd3-d813-19bf53761be5, 'name': SearchDatastore_Task, 'duration_secs': 0.00864} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.099691] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1e7c0e3-7c54-4f99-ae94-29982c65d18a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.105982] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 825.105982] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5253a7be-0293-a5ef-812d-835da815dc7c" [ 825.105982] env[62974]: _type = "Task" [ 825.105982] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.114548] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5253a7be-0293-a5ef-812d-835da815dc7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.196356] env[62974]: DEBUG nova.compute.manager [None req-74d12a07-e18b-4886-a5ad-3b5da65dea4e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 2392fb5b-a482-49c1-9668-283ba53973de] Instance disappeared before build. 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 825.223113] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 1873faa1-dec2-4d17-a71a-c53fea50c09b] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 825.340707] env[62974]: DEBUG nova.network.neutron [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Successfully updated port: 4c5397a0-f933-4f39-911d-525d8d7e5aac {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.356320] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.356497] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.356683] env[62974]: DEBUG nova.network.neutron [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.380129] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e22282b8-fcef-4336-9762-c4605a2892aa tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "0c2642d5-85fe-4db5-9891-025c88ca8c7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.081s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.405691] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.405992] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.525347] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654413, 'name': Rename_Task, 
'duration_secs': 0.421102} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.525347] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.525438] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c45e0774-dd60-49f8-926f-eea2be4cdc58 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.534096] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 825.534096] env[62974]: value = "task-2654414" [ 825.534096] env[62974]: _type = "Task" [ 825.534096] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.540383] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654414, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.564582] env[62974]: DEBUG oslo_concurrency.lockutils [req-cadeacd3-9c59-40d1-934c-418a2226ea73 req-c5ff5941-3897-4361-b241-3c94aa9866be service nova] Releasing lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.619233] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5253a7be-0293-a5ef-812d-835da815dc7c, 'name': SearchDatastore_Task, 'duration_secs': 0.009349} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.619510] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.619827] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 55229db9-9442-4973-a1f2-7762227167a4/55229db9-9442-4973-a1f2-7762227167a4.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.620115] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d98647b2-b625-4bdb-bfcf-5671caf59e03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.634984] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 825.634984] env[62974]: value = "task-2654415" [ 825.634984] env[62974]: _type = "Task" [ 825.634984] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.644301] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654415, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.713105] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74d12a07-e18b-4886-a5ad-3b5da65dea4e tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "2392fb5b-a482-49c1-9668-283ba53973de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.527s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.725838] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6dc914e9-bce5-4a19-a919-ae94981ea800] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 825.844092] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "refresh_cache-e42547b0-25b7-4a34-b832-b93103065928" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.844668] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "refresh_cache-e42547b0-25b7-4a34-b832-b93103065928" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.844668] env[62974]: DEBUG nova.network.neutron [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.911954] env[62974]: DEBUG nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.049385] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654414, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.104364] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.104596] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.104795] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.104967] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.105144] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.108767] env[62974]: INFO nova.compute.manager [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Terminating instance [ 826.151087] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654415, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.166865] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "6e8f07c2-60da-4bad-a7af-8c83294e232f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.167135] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "6e8f07c2-60da-4bad-a7af-8c83294e232f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.167350] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "6e8f07c2-60da-4bad-a7af-8c83294e232f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.167540] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "6e8f07c2-60da-4bad-a7af-8c83294e232f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.167781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "6e8f07c2-60da-4bad-a7af-8c83294e232f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.170824] env[62974]: INFO nova.compute.manager [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Terminating instance [ 826.230582] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: ecde0e49-c344-4003-b858-8312c1ac344f] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 826.259885] env[62974]: DEBUG nova.network.neutron [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance_info_cache with network_info: [{"id": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "address": "fa:16:3e:5f:6a:8c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8424609-cf", "ovs_interfaceid": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.384172] env[62974]: DEBUG nova.network.neutron [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.441044] env[62974]: DEBUG nova.compute.manager [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Received event network-vif-plugged-4c5397a0-f933-4f39-911d-525d8d7e5aac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 826.441299] env[62974]: DEBUG oslo_concurrency.lockutils [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] Acquiring lock "e42547b0-25b7-4a34-b832-b93103065928-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.441465] env[62974]: DEBUG oslo_concurrency.lockutils [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] Lock "e42547b0-25b7-4a34-b832-b93103065928-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.441636] env[62974]: DEBUG oslo_concurrency.lockutils [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] Lock "e42547b0-25b7-4a34-b832-b93103065928-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.441807] env[62974]: DEBUG nova.compute.manager [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] No waiting events found dispatching network-vif-plugged-4c5397a0-f933-4f39-911d-525d8d7e5aac {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.441972] env[62974]: WARNING nova.compute.manager [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Received unexpected event 
network-vif-plugged-4c5397a0-f933-4f39-911d-525d8d7e5aac for instance with vm_state building and task_state spawning. [ 826.442265] env[62974]: DEBUG nova.compute.manager [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Received event network-changed-4c5397a0-f933-4f39-911d-525d8d7e5aac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 826.442477] env[62974]: DEBUG nova.compute.manager [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Refreshing instance network info cache due to event network-changed-4c5397a0-f933-4f39-911d-525d8d7e5aac. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 826.442666] env[62974]: DEBUG oslo_concurrency.lockutils [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] Acquiring lock "refresh_cache-e42547b0-25b7-4a34-b832-b93103065928" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.443535] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.463177] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517e243d-c11a-4c8c-8871-c41fa11e9953 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.473324] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420075db-6401-4b28-9226-a47a4d336d2e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.506975] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb22b56-984b-4c29-b348-2838664e8488 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.517270] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53951804-78f1-45ba-8d49-a4b79b6c9ce3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.532630] env[62974]: DEBUG nova.compute.provider_tree [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.541807] env[62974]: DEBUG oslo_vmware.api [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654414, 'name': PowerOnVM_Task, 'duration_secs': 0.765041} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.542075] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.542274] env[62974]: INFO nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Took 7.81 seconds to spawn the instance on the hypervisor. [ 826.542451] env[62974]: DEBUG nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.543197] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ce51ba-e06c-4247-becb-628ff49f45dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.598856] env[62974]: DEBUG nova.network.neutron [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Updating instance_info_cache with network_info: [{"id": "4c5397a0-f933-4f39-911d-525d8d7e5aac", "address": "fa:16:3e:38:06:b9", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c5397a0-f9", "ovs_interfaceid": "4c5397a0-f933-4f39-911d-525d8d7e5aac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.618550] env[62974]: DEBUG nova.compute.manager [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 826.618550] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.618550] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312c4000-267b-4ec2-a5ed-104ed6e93bd8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.626605] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.628036] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd1b7945-f5a6-44bd-acfe-7228bbb918fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.638020] env[62974]: DEBUG oslo_vmware.api [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 826.638020] env[62974]: value = "task-2654416" [ 826.638020] env[62974]: _type = "Task" [ 826.638020] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.652373] env[62974]: DEBUG oslo_vmware.api [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.655931] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654415, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543074} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.656198] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 55229db9-9442-4973-a1f2-7762227167a4/55229db9-9442-4973-a1f2-7762227167a4.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.656412] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.656667] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-411cd3ce-1fde-4ac0-a223-42fd56922d4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.665838] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 826.665838] env[62974]: value = "task-2654417" [ 826.665838] env[62974]: _type = "Task" [ 826.665838] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.675489] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654417, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.677138] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "refresh_cache-6e8f07c2-60da-4bad-a7af-8c83294e232f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.678925] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquired lock "refresh_cache-6e8f07c2-60da-4bad-a7af-8c83294e232f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.678925] env[62974]: DEBUG nova.network.neutron [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.734856] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 586a3541-060f-4859-8507-17faa637b17e] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 826.761770] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.037938] env[62974]: DEBUG nova.scheduler.client.report [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 827.061868] env[62974]: INFO nova.compute.manager [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Took 43.58 seconds to build instance. 
[ 827.101857] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "refresh_cache-e42547b0-25b7-4a34-b832-b93103065928" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.101857] env[62974]: DEBUG nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Instance network_info: |[{"id": "4c5397a0-f933-4f39-911d-525d8d7e5aac", "address": "fa:16:3e:38:06:b9", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c5397a0-f9", "ovs_interfaceid": "4c5397a0-f933-4f39-911d-525d8d7e5aac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 827.102067] env[62974]: DEBUG oslo_concurrency.lockutils [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] Acquired lock "refresh_cache-e42547b0-25b7-4a34-b832-b93103065928" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.102270] env[62974]: DEBUG nova.network.neutron [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Refreshing network info cache for port 4c5397a0-f933-4f39-911d-525d8d7e5aac {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.103489] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:06:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c5397a0-f933-4f39-911d-525d8d7e5aac', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.111142] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Creating 
folder: Project (2e0a57dfe83843708e333b70e0cc2bc4). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 827.114583] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f793774a-8a3a-4afc-88e9-e783d6fa486d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.127878] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Created folder: Project (2e0a57dfe83843708e333b70e0cc2bc4) in parent group-v535199. [ 827.127878] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Creating folder: Instances. Parent ref: group-v535388. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 827.128037] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95de934e-eaa3-4432-bdf6-bc7af266db8c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.141495] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Created folder: Instances in parent group-v535388. [ 827.141782] env[62974]: DEBUG oslo.service.loopingcall [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.142387] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e42547b0-25b7-4a34-b832-b93103065928] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.142600] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2de69f70-40a1-4154-8972-8bd921f8e8e0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.164831] env[62974]: DEBUG oslo_vmware.api [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654416, 'name': PowerOffVM_Task, 'duration_secs': 0.276677} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.165621] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.165675] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.165919] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd400dc5-9c95-46e9-89c8-5677633bd51d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.172357] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.172357] env[62974]: value = "task-2654420" [ 827.172357] env[62974]: _type = "Task" [ 827.172357] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.178945] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654417, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087294} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.179328] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.181945] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bbb5cd-b553-4006-9ff8-3cdee2917fe0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.201891] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654420, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.213632] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 55229db9-9442-4973-a1f2-7762227167a4/55229db9-9442-4973-a1f2-7762227167a4.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.214723] env[62974]: DEBUG nova.network.neutron [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.216676] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-761d8a08-d2e2-4a65-8b90-e3b8c97abb16 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.239613] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: f9adcd7e-58a0-433c-8602-cca814b84aaa] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 827.241826] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 827.241826] env[62974]: value = "task-2654422" [ 827.241826] env[62974]: _type = "Task" [ 827.241826] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.252494] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 827.252707] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 827.252890] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Deleting the datastore file [datastore2] 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.256374] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-678c1901-60a1-4364-9182-f60b3aa24b8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.258308] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654422, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.269300] env[62974]: DEBUG oslo_vmware.api [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for the task: (returnval){ [ 827.269300] env[62974]: value = "task-2654423" [ 827.269300] env[62974]: _type = "Task" [ 827.269300] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.280816] env[62974]: DEBUG oslo_vmware.api [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.298372] env[62974]: DEBUG nova.network.neutron [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.300230] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "0bc05477-1802-4f8b-8d23-2742f9baf603" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.300479] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.301331] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "0bc05477-1802-4f8b-8d23-2742f9baf603-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.301331] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.301331] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.305860] env[62974]: INFO nova.compute.manager [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Terminating instance [ 827.543469] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.544042] env[62974]: DEBUG nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 827.546815] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.064s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.546996] env[62974]: DEBUG nova.objects.instance [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 827.550778] env[62974]: DEBUG nova.network.neutron [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Updated VIF entry in instance network info cache for port 4c5397a0-f933-4f39-911d-525d8d7e5aac. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.551214] env[62974]: DEBUG nova.network.neutron [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Updating instance_info_cache with network_info: [{"id": "4c5397a0-f933-4f39-911d-525d8d7e5aac", "address": "fa:16:3e:38:06:b9", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c5397a0-f9", "ovs_interfaceid": "4c5397a0-f933-4f39-911d-525d8d7e5aac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.565194] env[62974]: DEBUG oslo_concurrency.lockutils [None req-14d10459-a26d-4013-bc7d-0201fc15c46c tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.328s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.683848] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654420, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.743319] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 1933bc47-1717-48c1-b4a2-492a17573de7] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 827.755652] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654422, 'name': ReconfigVM_Task, 'duration_secs': 0.312617} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.755978] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 55229db9-9442-4973-a1f2-7762227167a4/55229db9-9442-4973-a1f2-7762227167a4.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.756677] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-583698fa-eb1a-48e7-b578-06850ba8d87c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.764589] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 827.764589] env[62974]: value = "task-2654424" [ 827.764589] env[62974]: _type = "Task" [ 827.764589] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.782767] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654424, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.786526] env[62974]: DEBUG oslo_vmware.api [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Task: {'id': task-2654423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171917} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.786526] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 827.786670] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 827.786973] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.787055] env[62974]: INFO nova.compute.manager [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Took 1.17 seconds to destroy the instance on the hypervisor. 
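[annotation] The "Waiting for the task ... to complete", "progress is N%." and "completed successfully" entries above are produced by oslo.vmware's task polling (wait_for_task / _poll_task). Below is a minimal, standalone sketch of that poll-until-done pattern, not the actual oslo.vmware implementation; the fetch_progress callable, its return values, and the 0.5 s interval are assumptions for illustration only.

```python
import time

def wait_for_task(fetch_progress, poll_interval=0.5, timeout=300.0):
    """Poll a task until completion, mirroring the 'progress is N%.'
    ... 'completed successfully' entries in the log.

    fetch_progress: assumed callable returning (state, percent), where
    state is one of 'running', 'success', 'error'. This is an
    illustrative stand-in, not the oslo.vmware API itself.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, percent = fetch_progress()
        print("Task progress is %d%%." % percent)
        if state == "success":
            print("Task completed successfully.")
            return
        if state == "error":
            raise RuntimeError("task failed at %d%% progress" % percent)
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)
```

The real driver records the wall-clock result of this loop as the 'duration_secs' value seen in the completed-task entries.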
[ 827.787255] env[62974]: DEBUG oslo.service.loopingcall [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.787428] env[62974]: DEBUG nova.compute.manager [-] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 827.787528] env[62974]: DEBUG nova.network.neutron [-] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.805030] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Releasing lock "refresh_cache-6e8f07c2-60da-4bad-a7af-8c83294e232f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.805030] env[62974]: DEBUG nova.compute.manager [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 827.805030] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.805822] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7809f366-073e-4cb3-a702-ce9b9baa85f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.811140] env[62974]: DEBUG nova.compute.manager [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 827.811334] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.812144] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e0f79e-dc0f-48e3-a502-c55f548b022b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.821231] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.823675] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4671e5df-68d4-4ff4-bff4-9b91a1417a5f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.825529] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.825529] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a890337a-da2d-4c9c-84f1-98271cb2a941 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.833840] env[62974]: DEBUG oslo_vmware.api [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 827.833840] env[62974]: value = "task-2654425" [ 827.833840] env[62974]: _type = "Task" [ 827.833840] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.838240] env[62974]: DEBUG oslo_vmware.api [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 827.838240] env[62974]: value = "task-2654426" [ 827.838240] env[62974]: _type = "Task" [ 827.838240] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.866142] env[62974]: DEBUG oslo_vmware.api [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654425, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.866710] env[62974]: DEBUG oslo_vmware.api [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.054250] env[62974]: DEBUG nova.compute.utils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 828.055893] env[62974]: DEBUG nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 828.062027] env[62974]: DEBUG nova.network.neutron [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 828.065619] env[62974]: DEBUG oslo_concurrency.lockutils [req-4df1e841-cf34-42b5-8ed0-edf916b3f4df req-03ceca50-fcb6-4ad4-b269-1e5541cdc6da service nova] Releasing lock "refresh_cache-e42547b0-25b7-4a34-b832-b93103065928" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.113028] env[62974]: DEBUG nova.policy [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcb0068668124811ab0cd555f828c7df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8152f704e86645a0a7e7e81d9edabf30', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 828.185169] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654420, 'name': CreateVM_Task, 'duration_secs': 0.68344} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.185347] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e42547b0-25b7-4a34-b832-b93103065928] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 828.186042] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.186233] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.186534] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 828.186791] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98625a00-f4e0-4ff9-a2b0-1ca5b9370562 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.192105] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 828.192105] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f5db53-054c-5c8b-9ca4-b75369b32f86" [ 828.192105] env[62974]: _type = "Task" [ 828.192105] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.200992] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f5db53-054c-5c8b-9ca4-b75369b32f86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.250380] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 7f0d367d-9d60-414b-990e-56a2b43fd963] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 828.277717] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654424, 'name': Rename_Task, 'duration_secs': 0.204594} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.280553] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.281306] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aedb010c-fa54-4fe1-923d-015e4f4db04d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.283582] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06771b1f-2679-45d3-ad1e-9d166db2a565 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.303803] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance 'af370de1-e4d7-4312-bc72-c6398eeaf2ed' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 828.313019] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 828.313019] env[62974]: value = "task-2654427" [ 828.313019] env[62974]: _type = "Task" [ 828.313019] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.320305] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654427, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.358668] env[62974]: DEBUG oslo_vmware.api [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654425, 'name': PowerOffVM_Task, 'duration_secs': 0.231487} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.358932] env[62974]: DEBUG oslo_vmware.api [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654426, 'name': PowerOffVM_Task, 'duration_secs': 0.199429} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.363343] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.363599] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.363935] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.364261] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.365239] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-118c0d35-1d05-4ae2-b5c8-9fd2c250ff0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.366846] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34daec3d-0eeb-49c8-996a-f9b565937cb9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.403040] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.403185] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.403755] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Deleting the datastore file [datastore2] 6e8f07c2-60da-4bad-a7af-8c83294e232f {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.403755] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64cd4a29-3c86-4e89-a7e6-d61e27637f5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.411270] env[62974]: DEBUG oslo_vmware.api [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 
tempest-ServerShowV254Test-1145974182-project-member] Waiting for the task: (returnval){ [ 828.411270] env[62974]: value = "task-2654430" [ 828.411270] env[62974]: _type = "Task" [ 828.411270] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.419982] env[62974]: DEBUG oslo_vmware.api [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654430, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.424710] env[62974]: DEBUG nova.network.neutron [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Successfully created port: 1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.447480] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.447716] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.447932] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleting the datastore file [datastore1] 0bc05477-1802-4f8b-8d23-2742f9baf603 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.448222] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5f51584-6faa-426f-bdc4-218d6bdf0038 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.460181] env[62974]: DEBUG oslo_vmware.api [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 828.460181] env[62974]: value = "task-2654431" [ 828.460181] env[62974]: _type = "Task" [ 828.460181] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.469762] env[62974]: DEBUG oslo_vmware.api [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654431, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.478378] env[62974]: DEBUG nova.compute.manager [req-88ded524-2f4f-4053-a74a-b0f2c112399f req-8a44d5d9-c997-445c-945b-6686d5c22649 service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Received event network-vif-deleted-1d12bec7-b3ee-4922-b371-3279eb60ac8f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 828.478619] env[62974]: INFO nova.compute.manager [req-88ded524-2f4f-4053-a74a-b0f2c112399f req-8a44d5d9-c997-445c-945b-6686d5c22649 service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Neutron deleted interface 1d12bec7-b3ee-4922-b371-3279eb60ac8f; detaching it from the instance and deleting it from the info cache [ 828.478812] env[62974]: DEBUG nova.network.neutron [req-88ded524-2f4f-4053-a74a-b0f2c112399f req-8a44d5d9-c997-445c-945b-6686d5c22649 service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.567261] env[62974]: DEBUG oslo_concurrency.lockutils [None req-250c17ff-6eb1-4f0d-b063-28d140e85ccb tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.572036] env[62974]: DEBUG nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 828.572036] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.978s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.572962] env[62974]: INFO nova.compute.claims [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.663296] env[62974]: DEBUG nova.network.neutron [-] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.704080] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f5db53-054c-5c8b-9ca4-b75369b32f86, 'name': SearchDatastore_Task, 'duration_secs': 0.010975} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.704417] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.704661] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 828.704931] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.705096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.705280] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 828.705592] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dd36601-2f2b-40a0-b46b-882fd0963702 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.716498] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 828.716692] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 828.717461] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48a6c5b5-369d-4cc2-b323-fb46a0ab25e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.725331] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 828.725331] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526811c7-6264-9142-ab48-f4fd8fb4301e" [ 828.725331] env[62974]: _type = "Task" [ 828.725331] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.735619] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526811c7-6264-9142-ab48-f4fd8fb4301e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.754078] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 001557f9-ea50-4e86-9eeb-dd4436791453] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 828.812551] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.812960] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edd84739-6286-43d4-9c69-bb2dacee6a79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.825928] env[62974]: DEBUG oslo_vmware.api [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654427, 'name': PowerOnVM_Task, 'duration_secs': 0.505876} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.827618] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.827834] env[62974]: INFO nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Took 7.40 seconds to spawn the instance on the hypervisor. 
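[annotation] The paired "Acquiring lock ..." / "acquired ... waited Ns" / '"released" ... held Ns' entries (for example around the image-cache path [datastore1] devstack-image-cache_base/...) are emitted by oslo.concurrency's lockutils. A minimal sketch of the two usual usage forms follows; the lock names and function bodies here are illustrative assumptions, not Nova's actual code.

```python
from oslo_concurrency import lockutils

def refresh_image_cache(image_id):
    # Context-manager form: the "Acquiring lock" / "released" pairs in
    # the log bracket critical sections such as the image-cache lookup.
    with lockutils.lock("devstack-image-cache_base/%s" % image_id):
        # ... look up or populate the cached VMDK (placeholder) ...
        pass

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Decorator form: serializes callers the same way the
    # "compute_resources" lock serializes the resource tracker.
    # ... adjust usage records under the lock (placeholder) ...
    pass
```

The "waited" and "held" durations in the log correspond to the time spent blocking on, and then inside, these critical sections.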
[ 828.828038] env[62974]: DEBUG nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.828450] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 828.828450] env[62974]: value = "task-2654432" [ 828.828450] env[62974]: _type = "Task" [ 828.828450] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.829322] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a642067-5e01-4fd8-8651-b42cc7329c50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.844158] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.923304] env[62974]: DEBUG oslo_vmware.api [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Task: {'id': task-2654430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148564} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.923628] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.923841] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.924996] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.924996] env[62974]: INFO nova.compute.manager [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 828.924996] env[62974]: DEBUG oslo.service.loopingcall [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.924996] env[62974]: DEBUG nova.compute.manager [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 828.924996] env[62974]: DEBUG nova.network.neutron [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.950256] env[62974]: DEBUG nova.network.neutron [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.973038] env[62974]: DEBUG oslo_vmware.api [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182766} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.973357] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.973553] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.973756] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.973949] env[62974]: INFO nova.compute.manager [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Took 1.16 seconds to destroy the instance on the hypervisor. [ 828.974527] env[62974]: DEBUG oslo.service.loopingcall [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.974527] env[62974]: DEBUG nova.compute.manager [-] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 828.974688] env[62974]: DEBUG nova.network.neutron [-] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.982165] env[62974]: DEBUG nova.compute.manager [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.984454] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de07812-8d13-4722-9bd5-128dfbc65d14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.987354] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c49c8c9-8b05-4814-8234-20e0962632da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.003732] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c80cbc4-6395-4ac8-a177-8dab64992773 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.052033] env[62974]: DEBUG nova.compute.manager [req-88ded524-2f4f-4053-a74a-b0f2c112399f req-8a44d5d9-c997-445c-945b-6686d5c22649 service nova] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Detach interface failed, port_id=1d12bec7-b3ee-4922-b371-3279eb60ac8f, reason: Instance 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 829.166689] env[62974]: INFO nova.compute.manager [-] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Took 1.38 seconds to deallocate network for instance. [ 829.236772] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526811c7-6264-9142-ab48-f4fd8fb4301e, 'name': SearchDatastore_Task, 'duration_secs': 0.020116} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.237674] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5468eee-68a6-4bcf-af91-cc22bd1c5f27 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.244279] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 829.244279] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5292ae5f-6dbf-a7d8-4e28-2645d9bd3217" [ 829.244279] env[62974]: _type = "Task" [ 829.244279] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.253955] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5292ae5f-6dbf-a7d8-4e28-2645d9bd3217, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.257568] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: a63aa120-1c7b-4abc-93cf-4d138f5cebde] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 829.345726] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654432, 'name': PowerOffVM_Task, 'duration_secs': 0.239134} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.346208] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 829.346425] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance 'af370de1-e4d7-4312-bc72-c6398eeaf2ed' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 829.358457] env[62974]: INFO nova.compute.manager [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Took 44.42 seconds to build instance. 
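[annotation] The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entries come from oslo.service's looping-call helper, which re-invokes a function on an interval until it signals completion. A minimal sketch of that pattern with FixedIntervalLoopingCall is shown below; the retry limit and 1.0 s interval are assumptions, not the values Nova uses.

```python
from oslo_service import loopingcall

def deallocate_with_retries(deallocate, max_attempts=3):
    """Run `deallocate` until it succeeds or attempts are exhausted,
    in the style of _deallocate_network_with_retries in the log."""
    attempts = {"count": 0}

    def _try_once():
        attempts["count"] += 1
        try:
            deallocate()
        except Exception:
            if attempts["count"] >= max_attempts:
                raise  # stops the loop; wait() re-raises the error
            return  # try again on the next interval
        # Success: signal the looping call that we are done.
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    # start() returns an event; wait() blocks until LoopingCallDone
    # (or until an exception stops the loop).
    timer.start(interval=1.0).wait()
```

The DEBUG line from loopingcall.py is logged while that wait() is blocking, which is why it appears between the hypervisor destroy and the "Took N seconds to deallocate network" entries.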
[ 829.453388] env[62974]: DEBUG nova.network.neutron [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.503050] env[62974]: INFO nova.compute.manager [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] instance snapshotting [ 829.505043] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefb705c-4acb-4e44-9342-65ca0e4e9d44 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.526021] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647e4889-4064-439c-bc14-6cc4f0f7b644 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.583630] env[62974]: DEBUG nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 829.606762] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 829.606964] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.607145] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 829.607329] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.607474] env[62974]: DEBUG nova.virt.hardware [None 
req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 829.607622] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 829.607832] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 829.607982] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 829.608161] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 829.608324] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 829.608497] env[62974]: DEBUG nova.virt.hardware [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 829.609378] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b14fa6-76ea-420f-94b4-08994bec2eff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.622030] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a498b8bf-4b8c-4e78-a9d8-d01e332d5fab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.674616] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.758266] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce 
tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5292ae5f-6dbf-a7d8-4e28-2645d9bd3217, 'name': SearchDatastore_Task, 'duration_secs': 0.036768} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.760104] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.760104] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e42547b0-25b7-4a34-b832-b93103065928/e42547b0-25b7-4a34-b832-b93103065928.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 829.760104] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3d7c725-71c0-42e0-a1fe-86b0f4d28fff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.763995] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 2a498460-fced-410b-8b33-3595a2ac6753] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 829.776021] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 829.776021] env[62974]: value = "task-2654433" [ 829.776021] env[62974]: _type = "Task" [ 829.776021] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.783518] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654433, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.822830] env[62974]: DEBUG nova.network.neutron [-] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.860495] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 829.860941] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.860980] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 829.861233] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.861356] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 829.861510] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 829.861718] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 829.861882] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 829.862061] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 829.862230] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 829.862414] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 829.871099] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f273551-8e77-432a-acc1-25bb237fb514 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.599s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.871337] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca8e25fc-c7f2-4c6d-b643-abbf99283ded {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.892307] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 829.892307] env[62974]: value = "task-2654434" [ 829.892307] env[62974]: _type = "Task" [ 829.892307] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.906967] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654434, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.955972] env[62974]: INFO nova.compute.manager [-] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Took 1.03 seconds to deallocate network for instance. 
[ 829.972216] env[62974]: DEBUG nova.network.neutron [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Successfully updated port: 1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.036889] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 830.037195] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-be77a695-a4d9-49bc-9313-b1da7ea20a10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.047927] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 830.047927] env[62974]: value = "task-2654435" [ 830.047927] env[62974]: _type = "Task" [ 830.047927] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.064648] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654435, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.156101] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b77239-03a6-4cf1-9b45-633507371462 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.166902] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3154aa5-c0d4-4776-9c06-ca8ae39423e4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.211268] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a506461-9478-4080-af1f-018c82e2434b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.215548] env[62974]: DEBUG nova.compute.manager [req-fd99b0a2-3615-4a0d-a573-792ca232745e req-f1effa8f-03e0-4c79-8c63-1b233360afea service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Received event network-vif-plugged-1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 830.215932] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd99b0a2-3615-4a0d-a573-792ca232745e req-f1effa8f-03e0-4c79-8c63-1b233360afea service nova] Acquiring lock "e23dbff7-d23e-4909-9b33-67ed15c325e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.216291] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd99b0a2-3615-4a0d-a573-792ca232745e req-f1effa8f-03e0-4c79-8c63-1b233360afea service nova] Lock 
"e23dbff7-d23e-4909-9b33-67ed15c325e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.216601] env[62974]: DEBUG oslo_concurrency.lockutils [req-fd99b0a2-3615-4a0d-a573-792ca232745e req-f1effa8f-03e0-4c79-8c63-1b233360afea service nova] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.216927] env[62974]: DEBUG nova.compute.manager [req-fd99b0a2-3615-4a0d-a573-792ca232745e req-f1effa8f-03e0-4c79-8c63-1b233360afea service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] No waiting events found dispatching network-vif-plugged-1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 830.217328] env[62974]: WARNING nova.compute.manager [req-fd99b0a2-3615-4a0d-a573-792ca232745e req-f1effa8f-03e0-4c79-8c63-1b233360afea service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Received unexpected event network-vif-plugged-1f4c134a-f095-4872-9ffc-8b90d02f29f9 for instance with vm_state building and task_state spawning. [ 830.225827] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597ae965-1558-4fd3-b626-9362b7b30226 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.246024] env[62974]: DEBUG nova.compute.provider_tree [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.269078] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: a8446718-f2df-4bad-b5e3-537f19daa823] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 830.285111] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654433, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.331023] env[62974]: INFO nova.compute.manager [-] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Took 1.35 seconds to deallocate network for instance. [ 830.405378] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654434, 'name': ReconfigVM_Task, 'duration_secs': 0.175498} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.405879] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance 'af370de1-e4d7-4312-bc72-c6398eeaf2ed' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 830.463553] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.478548] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.478548] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.478548] env[62974]: DEBUG nova.network.neutron [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.557979] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654435, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.710780] env[62974]: DEBUG nova.compute.manager [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Received event network-vif-deleted-70c67ce0-0054-4b7d-886e-7073fb213aa5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 830.710971] env[62974]: DEBUG nova.compute.manager [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Received event network-changed-8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 830.711116] env[62974]: DEBUG nova.compute.manager [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Refreshing instance network info cache due to event network-changed-8c1e40ea-8afa-424a-9c2d-65f7e1179366. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 830.711303] env[62974]: DEBUG oslo_concurrency.lockutils [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] Acquiring lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.711445] env[62974]: DEBUG oslo_concurrency.lockutils [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] Acquired lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.711604] env[62974]: DEBUG nova.network.neutron [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Refreshing network info cache for port 8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.748349] env[62974]: DEBUG nova.scheduler.client.report [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.770619] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 7af90ee3-c0f4-451a-9e51-4ef0fb3d95bc] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 830.785501] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654433, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675746} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.787990] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e42547b0-25b7-4a34-b832-b93103065928/e42547b0-25b7-4a34-b832-b93103065928.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 830.787990] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.787990] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f516e94b-7651-4c06-967b-04856a2c013c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.793397] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 830.793397] env[62974]: value = "task-2654436" [ 830.793397] env[62974]: _type = "Task" [ 830.793397] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.802069] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654436, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.836386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.913152] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.913423] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.913656] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.913954] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.914120] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.914334] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.914588] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.914815] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab 
tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.915047] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.915275] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.915664] env[62974]: DEBUG nova.virt.hardware [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.921453] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Reconfiguring VM instance instance-0000003a to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 830.921802] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb5e51e3-7753-4d6d-b74f-f22d0297a6a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.943906] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 830.943906] env[62974]: value = "task-2654437" [ 830.943906] env[62974]: _type = "Task" [ 830.943906] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.954274] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654437, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.009746] env[62974]: DEBUG nova.network.neutron [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.057684] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654435, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.130403] env[62974]: DEBUG nova.network.neutron [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f4c134a-f0", "ovs_interfaceid": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.252842] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.681s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.253456] env[62974]: DEBUG nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.257160] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.851s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.257160] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.258306] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.148s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.258505] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.261028] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.194s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.262732] env[62974]: INFO nova.compute.claims [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.275987] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 2313468e-820f-4fff-bdeb-5d542c94584d] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 831.289828] env[62974]: INFO nova.scheduler.client.report [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleted allocations for instance 5d6a072e-dba7-461d-9d41-8ca003b31102 [ 831.294196] env[62974]: INFO nova.scheduler.client.report [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleted allocations for instance 8621428e-cf42-47a4-82c8-a003c377b257 [ 831.309516] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 
tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074923} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.310494] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.313995] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdfa01a-9afc-433b-b25f-791a070a5ab8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.341486] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] e42547b0-25b7-4a34-b832-b93103065928/e42547b0-25b7-4a34-b832-b93103065928.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.342149] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4c9b542-8430-4c47-9e6e-bdf49407fc2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.366957] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 831.366957] env[62974]: value = "task-2654438" [ 831.366957] env[62974]: _type = "Task" [ 831.366957] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.375840] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654438, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.456835] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654437, 'name': ReconfigVM_Task, 'duration_secs': 0.440106} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.457138] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Reconfigured VM instance instance-0000003a to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 831.458043] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd6eacd-17a1-4477-a159-8b2c8bd6d40b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.462215] env[62974]: DEBUG nova.network.neutron [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updated VIF entry in instance network info cache for port 8c1e40ea-8afa-424a-9c2d-65f7e1179366. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.462677] env[62974]: DEBUG nova.network.neutron [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.485833] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] af370de1-e4d7-4312-bc72-c6398eeaf2ed/af370de1-e4d7-4312-bc72-c6398eeaf2ed.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.486843] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-004456e8-154e-4f62-9fe9-e8bd73ed5d03 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.510084] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 831.510084] env[62974]: value = "task-2654439" [ 831.510084] env[62974]: _type = "Task" [ 831.510084] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.518804] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.560087] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654435, 'name': CreateSnapshot_Task, 'duration_secs': 1.15628} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.560452] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 831.561341] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c856c3c-8b68-4f25-9356-8137ec01ba91 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.633283] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.633625] env[62974]: DEBUG nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Instance network_info: |[{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap1f4c134a-f0", "ovs_interfaceid": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 831.634059] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:3a:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '163e60bd-32d6-41c5-95e6-2eb10c5c9245', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f4c134a-f095-4872-9ffc-8b90d02f29f9', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.641918] env[62974]: DEBUG oslo.service.loopingcall [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.642538] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.642725] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da5cc1aa-b732-4ed8-b2dd-003e1ba4c0ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.663887] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.663887] env[62974]: value = "task-2654440" [ 831.663887] env[62974]: _type = "Task" [ 831.663887] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.672381] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654440, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.771347] env[62974]: DEBUG nova.compute.utils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 831.773020] env[62974]: DEBUG nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 831.773228] env[62974]: DEBUG nova.network.neutron [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 831.779223] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 8f4faa77-4f18-41da-b8d0-efba799d6ec6] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 831.799423] env[62974]: DEBUG oslo_concurrency.lockutils [None req-19f7764f-e1df-4e30-92b1-2dfdde91e897 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "5d6a072e-dba7-461d-9d41-8ca003b31102" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.934s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.808250] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa33e557-57f5-4af0-85d4-027988da145c tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "8621428e-cf42-47a4-82c8-a003c377b257" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.668s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.816037] env[62974]: DEBUG nova.policy [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d12189f3f8946eead2ca2fdacd9c8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0575ed5c28314e939bf91ea58759bf82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 831.878264] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654438, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.966211] env[62974]: DEBUG oslo_concurrency.lockutils [req-767c8f03-b9ad-4326-ae9b-4a9bf1ec8959 req-0557e471-6855-433b-bf9c-e64f34acb069 service nova] Releasing lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.021862] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654439, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.082382] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 832.082743] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6bf51b12-d054-4c91-b57f-2d84cbbaa294 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.091709] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 832.091709] env[62974]: value = "task-2654441" [ 832.091709] env[62974]: _type = "Task" [ 832.091709] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.102528] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654441, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.124071] env[62974]: DEBUG nova.network.neutron [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Successfully created port: 77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.177974] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654440, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.229093] env[62974]: DEBUG nova.compute.manager [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Received event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 832.229248] env[62974]: DEBUG nova.compute.manager [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing instance network info cache due to event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 832.229488] env[62974]: DEBUG oslo_concurrency.lockutils [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] Acquiring lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.229658] env[62974]: DEBUG oslo_concurrency.lockutils [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] Acquired lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.229839] env[62974]: DEBUG nova.network.neutron [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.277244] env[62974]: DEBUG nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 832.281771] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 2174cb7d-3e73-4529-b9f8-735dd6dfcf4e] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 832.380650] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654438, 'name': ReconfigVM_Task, 'duration_secs': 0.638554} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.380971] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Reconfigured VM instance instance-00000045 to attach disk [datastore1] e42547b0-25b7-4a34-b832-b93103065928/e42547b0-25b7-4a34-b832-b93103065928.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.381641] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8f05e09-91e2-4bfa-b152-9c09aa388e95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.389254] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 832.389254] env[62974]: value = "task-2654442" [ 832.389254] env[62974]: _type = "Task" [ 832.389254] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.402269] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654442, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.525710] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654439, 'name': ReconfigVM_Task, 'duration_secs': 0.586733} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.525994] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Reconfigured VM instance instance-0000003a to attach disk [datastore2] af370de1-e4d7-4312-bc72-c6398eeaf2ed/af370de1-e4d7-4312-bc72-c6398eeaf2ed.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.526290] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance 'af370de1-e4d7-4312-bc72-c6398eeaf2ed' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 832.602743] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654441, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.633516] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.633664] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.633829] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.634023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.634384] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.640640] env[62974]: INFO nova.compute.manager [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Terminating instance [ 832.685178] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654440, 'name': CreateVM_Task, 'duration_secs': 0.535166} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.688989] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.690061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.690362] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.690816] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.691370] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38cfc2f0-4001-4eae-ad05-8c5bf7209de9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.699394] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 832.699394] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d902c2-1037-6b1f-9775-c1d43cfec9a4" [ 832.699394] env[62974]: _type = "Task" [ 832.699394] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.716410] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d902c2-1037-6b1f-9775-c1d43cfec9a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.788787] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 124a02a3-ec9a-4c4f-b8df-1014ce8b8b7d] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 832.863114] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69542a2c-40fa-4874-9450-cec1de0e9be7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.872119] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127b4dfa-eedf-411a-a914-99d093f1b1f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.914415] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461f7160-3d06-4143-b820-890b7974e558 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.929639] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654442, 'name': Rename_Task, 'duration_secs': 0.345082} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.929985] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.931608] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17c9f83-39d0-4c71-985c-d58f027b8b1f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.936349] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7a751b8-7a7f-442e-b436-6dd72042a327 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.951935] env[62974]: DEBUG nova.compute.provider_tree [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.954691] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 832.954691] env[62974]: value = "task-2654443" [ 832.954691] env[62974]: _type = "Task" [ 832.954691] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.967826] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.036407] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7231b538-8216-48a5-9643-7ce41e3f65aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.058118] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32539119-7f3e-4545-808e-38f55117ce19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.076608] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance 'af370de1-e4d7-4312-bc72-c6398eeaf2ed' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 833.105635] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654441, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.109838] env[62974]: DEBUG nova.network.neutron [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updated VIF entry in instance network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.109838] env[62974]: DEBUG nova.network.neutron [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f4c134a-f0", "ovs_interfaceid": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.144585] env[62974]: DEBUG nova.compute.manager [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 833.144896] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.146047] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb536e2-1151-4a2e-9bbf-98f4689f425c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.155064] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.155064] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52cb83b2-e164-4252-859f-12281ba4745b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.163306] env[62974]: DEBUG oslo_vmware.api [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 833.163306] env[62974]: value = "task-2654444" [ 833.163306] env[62974]: _type = "Task" [ 833.163306] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.174150] env[62974]: DEBUG oslo_vmware.api [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654444, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.210940] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d902c2-1037-6b1f-9775-c1d43cfec9a4, 'name': SearchDatastore_Task, 'duration_secs': 0.018351} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.211299] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.211543] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.211787] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.211989] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.212267] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.212571] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f625a29-cea0-45c7-8d48-7b394dbc2698 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.224048] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.224048] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.224048] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac6a2bd3-8a98-4a9e-99e2-648056f1f81f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.229968] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 833.229968] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52665d03-2108-fae7-a104-be7fb1e1d0d3" [ 833.229968] env[62974]: _type = "Task" [ 833.229968] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.238856] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52665d03-2108-fae7-a104-be7fb1e1d0d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.292682] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 572c2c5f-6a24-4532-9c80-d76017e4aaa1] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 833.296037] env[62974]: DEBUG nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 833.331579] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 833.331935] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.332190] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 833.332339] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.332522] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 833.332757] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 833.333251] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 833.333251] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 833.333419] env[62974]: DEBUG nova.virt.hardware [None 
req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 833.333588] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 833.333763] env[62974]: DEBUG nova.virt.hardware [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 833.334994] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121d8d05-1adc-4b5c-a363-e751068b9994 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.346657] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56966e21-7508-438c-b568-99305702d9e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.457334] env[62974]: DEBUG nova.scheduler.client.report [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.474706] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654443, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.607199] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654441, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.611898] env[62974]: DEBUG oslo_concurrency.lockutils [req-9269c311-789b-49b7-b10d-5643137223f8 req-cd251394-512c-47d7-81ec-3e0627e174a1 service nova] Releasing lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.652144] env[62974]: DEBUG nova.network.neutron [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Successfully updated port: 77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 833.663055] env[62974]: DEBUG nova.network.neutron [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Port f8424609-cf9e-4474-a78b-3d28dbdd7cb0 binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 833.675767] env[62974]: DEBUG oslo_vmware.api [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654444, 'name': PowerOffVM_Task, 'duration_secs': 0.267953} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.676634] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.676890] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.677216] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2521f9c9-75e4-4ebd-a161-352a33258940 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.741411] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52665d03-2108-fae7-a104-be7fb1e1d0d3, 'name': SearchDatastore_Task, 'duration_secs': 0.012362} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.742282] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c91cc258-0c7e-4941-94d4-cf0fee83830b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.748927] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 833.748927] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d08dc2-a457-1a3d-ec67-49326b9f8206" [ 833.748927] env[62974]: _type = "Task" [ 833.748927] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.761451] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d08dc2-a457-1a3d-ec67-49326b9f8206, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.767884] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.768228] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.768486] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleting the datastore file [datastore2] d8b7a39f-ec73-4a87-9b1e-9428ca72f895 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.768798] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b8a7ceb-4792-4fd8-9440-0c5b9dcc5383 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.776396] env[62974]: DEBUG oslo_vmware.api [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for the task: (returnval){ [ 833.776396] env[62974]: value = "task-2654446" [ 833.776396] env[62974]: _type = "Task" [ 833.776396] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.784887] env[62974]: DEBUG oslo_vmware.api [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654446, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.800071] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.800373] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Cleaning up deleted instances with incomplete migration {{(pid=62974) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 833.968050] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.968050] env[62974]: DEBUG nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 833.970532] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.783s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.972468] env[62974]: INFO nova.compute.claims [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.984232] env[62974]: DEBUG oslo_vmware.api [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654443, 'name': PowerOnVM_Task, 'duration_secs': 0.56223} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.984858] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.984858] env[62974]: INFO nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Took 9.88 seconds to spawn the instance on the hypervisor. 
[ 833.985059] env[62974]: DEBUG nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 833.985751] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abd3eae-a45d-4d32-a077-d274ce39f878 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.105411] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654441, 'name': CloneVM_Task, 'duration_secs': 1.5224} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.105816] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Created linked-clone VM from snapshot [ 834.106640] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cfaad5-fe8f-4cfd-a4af-960ed50b6942 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.117876] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Uploading image dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 834.140771] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 834.140771] env[62974]: value = "vm-535393" [ 834.140771] env[62974]: _type = "VirtualMachine" [ 834.140771] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 834.141070] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d6a27bbf-51e9-4f00-bba1-af57d9c910c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.149449] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease: (returnval){ [ 834.149449] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c71747-30f7-a6f9-0421-0c2787b779e1" [ 834.149449] env[62974]: _type = "HttpNfcLease" [ 834.149449] env[62974]: } obtained for exporting VM: (result){ [ 834.149449] env[62974]: value = "vm-535393" [ 834.149449] env[62974]: _type = "VirtualMachine" [ 834.149449] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 834.149682] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the lease: (returnval){ [ 834.149682] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c71747-30f7-a6f9-0421-0c2787b779e1" [ 834.149682] env[62974]: _type = "HttpNfcLease" [ 834.149682] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 834.156186] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.156368] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.156536] env[62974]: DEBUG nova.network.neutron [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.161706] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 834.161706] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c71747-30f7-a6f9-0421-0c2787b779e1" [ 834.161706] env[62974]: _type = "HttpNfcLease" [ 834.161706] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 834.260236] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d08dc2-a457-1a3d-ec67-49326b9f8206, 'name': SearchDatastore_Task, 'duration_secs': 0.027029} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.260328] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.260583] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/e23dbff7-d23e-4909-9b33-67ed15c325e7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.260837] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca9795d7-488e-4e72-aa39-94ecb12f10be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.269555] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 834.269555] env[62974]: value = "task-2654448" [ 834.269555] env[62974]: _type = "Task" [ 834.269555] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.277538] env[62974]: DEBUG nova.compute.manager [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Received event network-vif-plugged-77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 834.277673] env[62974]: DEBUG oslo_concurrency.lockutils [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] Acquiring lock "6243cce3-8611-46fa-8379-e2f3c825c4dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.277853] env[62974]: DEBUG oslo_concurrency.lockutils [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.278039] env[62974]: DEBUG oslo_concurrency.lockutils [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.278208] env[62974]: DEBUG nova.compute.manager [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 
req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] No waiting events found dispatching network-vif-plugged-77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.278374] env[62974]: WARNING nova.compute.manager [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Received unexpected event network-vif-plugged-77480bd6-dce2-44cc-9b9b-3987573c454f for instance with vm_state building and task_state spawning. [ 834.278534] env[62974]: DEBUG nova.compute.manager [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Received event network-changed-77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 834.278718] env[62974]: DEBUG nova.compute.manager [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Refreshing instance network info cache due to event network-changed-77480bd6-dce2-44cc-9b9b-3987573c454f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 834.278895] env[62974]: DEBUG oslo_concurrency.lockutils [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] Acquiring lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.282814] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.292714] env[62974]: DEBUG oslo_vmware.api [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Task: {'id': task-2654446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402431} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.292861] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.293055] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 834.293236] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.293401] env[62974]: INFO nova.compute.manager [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Took 1.15 seconds to destroy the instance on the hypervisor. [ 834.293627] env[62974]: DEBUG oslo.service.loopingcall [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.293811] env[62974]: DEBUG nova.compute.manager [-] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 834.293905] env[62974]: DEBUG nova.network.neutron [-] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 834.302397] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 834.439173] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.439527] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.439834] env[62974]: 
DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.440122] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.440366] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.443010] env[62974]: INFO nova.compute.manager [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Terminating instance [ 834.484175] env[62974]: DEBUG nova.compute.utils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.485828] env[62974]: DEBUG nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 834.486017] env[62974]: DEBUG nova.network.neutron [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 834.503580] env[62974]: INFO nova.compute.manager [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Took 48.35 seconds to build instance. 
[ 834.540025] env[62974]: DEBUG nova.policy [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a006166df7ec442d834a6b3094875125', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4e071d2e6ef4b928dd40ea5b8f81fe6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 834.576121] env[62974]: DEBUG nova.compute.manager [req-56032d55-80f6-4e28-80b7-c03f090598fc req-b2388440-e5b0-49b8-9f5d-de0761ad7ad4 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Received event network-vif-deleted-947659a6-f0ce-4065-a591-6a15666e4ac5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 834.576341] env[62974]: INFO nova.compute.manager [req-56032d55-80f6-4e28-80b7-c03f090598fc req-b2388440-e5b0-49b8-9f5d-de0761ad7ad4 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Neutron deleted interface 947659a6-f0ce-4065-a591-6a15666e4ac5; detaching it from the instance and deleting it from the info cache [ 834.576517] env[62974]: DEBUG nova.network.neutron [req-56032d55-80f6-4e28-80b7-c03f090598fc req-b2388440-e5b0-49b8-9f5d-de0761ad7ad4 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.581106] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "1c7fabf7-ba82-4628-9016-b0f198add99a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.581347] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.581552] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "1c7fabf7-ba82-4628-9016-b0f198add99a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.581738] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.581907] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.583930] env[62974]: INFO nova.compute.manager [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Terminating instance [ 834.661547] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 834.661547] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c71747-30f7-a6f9-0421-0c2787b779e1" [ 834.661547] env[62974]: _type = "HttpNfcLease" [ 834.661547] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 834.662703] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 834.662703] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c71747-30f7-a6f9-0421-0c2787b779e1" [ 834.662703] env[62974]: _type = "HttpNfcLease" [ 834.662703] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 834.663292] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca50776f-117f-4c7a-82f4-d4f3e11e1c93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.679858] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f60484-fc75-053a-4cc9-27941832de3a/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 834.680111] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f60484-fc75-053a-4cc9-27941832de3a/disk-0.vmdk for reading. 
{{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 834.717623] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.717623] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.717623] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.760560] env[62974]: DEBUG nova.network.neutron [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.781700] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654448, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.816498] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a65db863-e8bc-45f3-8dd4-c1c0c904dd7f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.880375] env[62974]: DEBUG nova.network.neutron [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Successfully created port: 5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.947249] env[62974]: DEBUG nova.compute.manager [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.947896] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.948751] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2567450-069c-4cb9-918b-488f09b19b70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.965064] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.965064] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8afbe555-3da3-4352-95fa-2e305d23db9a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.972864] env[62974]: DEBUG oslo_vmware.api [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 834.972864] env[62974]: value = "task-2654449" [ 834.972864] env[62974]: _type = "Task" [ 834.972864] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.983017] env[62974]: DEBUG oslo_vmware.api [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654449, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.991454] env[62974]: DEBUG nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 835.005531] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c7f20ae4-dd03-400d-ac1f-9c1bc6fd77ce tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "e42547b0-25b7-4a34-b832-b93103065928" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.909s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.053571] env[62974]: DEBUG nova.network.neutron [-] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.063130] env[62974]: DEBUG nova.network.neutron [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updating instance_info_cache with network_info: [{"id": "77480bd6-dce2-44cc-9b9b-3987573c454f", "address": "fa:16:3e:8f:dc:8c", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77480bd6-dc", "ovs_interfaceid": "77480bd6-dce2-44cc-9b9b-3987573c454f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.081762] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9a13fcc-b526-439a-b89a-dae26e6b649f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.088093] env[62974]: DEBUG nova.compute.manager [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 835.088333] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 835.089543] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2caa379-4ba6-449f-97f9-bf6c1d2b54d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.099580] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3ee247-893c-4c4c-b14f-b5fa87035504 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.114332] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.115029] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d642b4bf-f0ac-49da-a3bd-33bee259c9f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.122651] env[62974]: DEBUG oslo_vmware.api [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 835.122651] env[62974]: value = "task-2654450" [ 835.122651] env[62974]: _type = "Task" [ 835.122651] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.133842] env[62974]: DEBUG oslo_vmware.api [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.156288] env[62974]: DEBUG nova.compute.manager [req-56032d55-80f6-4e28-80b7-c03f090598fc req-b2388440-e5b0-49b8-9f5d-de0761ad7ad4 service nova] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Detach interface failed, port_id=947659a6-f0ce-4065-a591-6a15666e4ac5, reason: Instance d8b7a39f-ec73-4a87-9b1e-9428ca72f895 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 835.281285] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640162} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.284761] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/e23dbff7-d23e-4909-9b33-67ed15c325e7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.285440] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.287067] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4afd08ad-0bf1-4642-b1ae-a2c04d94fe75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.296594] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 835.296594] env[62974]: value = "task-2654451" [ 835.296594] env[62974]: _type = "Task" [ 835.296594] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.308527] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.485203] env[62974]: DEBUG oslo_vmware.api [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654449, 'name': PowerOffVM_Task, 'duration_secs': 0.335836} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.485737] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.486076] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.487224] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-375e797b-205b-4462-8b65-320693735841 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.557525] env[62974]: INFO nova.compute.manager [-] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Took 1.26 seconds to deallocate network for instance. [ 835.574447] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Releasing lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.574447] env[62974]: DEBUG nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Instance network_info: |[{"id": "77480bd6-dce2-44cc-9b9b-3987573c454f", "address": "fa:16:3e:8f:dc:8c", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77480bd6-dc", "ovs_interfaceid": "77480bd6-dce2-44cc-9b9b-3987573c454f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 835.574704] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.574704] 
env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.574704] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleting the datastore file [datastore1] d6ce3f68-a757-48bc-abeb-49c3aacdf465 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.574888] env[62974]: DEBUG oslo_concurrency.lockutils [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] Acquired lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.575042] env[62974]: DEBUG nova.network.neutron [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Refreshing network info cache for port 77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.576232] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:dc:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d859f07-052d-4a69-bdf1-24261a6a6daa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77480bd6-dce2-44cc-9b9b-3987573c454f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 835.584531] env[62974]: DEBUG oslo.service.loopingcall [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.584873] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ec042cd-9875-491e-860e-475443cd1195 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.591250] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 835.591469] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b23266e5-54b2-4176-8f6e-1c25151f4e15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.615233] env[62974]: DEBUG oslo_vmware.api [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 835.615233] env[62974]: value = "task-2654453" [ 835.615233] env[62974]: _type = "Task" [ 835.615233] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.620985] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 835.620985] env[62974]: value = "task-2654454" [ 835.620985] env[62974]: _type = "Task" [ 835.620985] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.640625] env[62974]: DEBUG oslo_vmware.api [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.648034] env[62974]: DEBUG oslo_vmware.api [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654450, 'name': PowerOffVM_Task, 'duration_secs': 0.266284} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.648034] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654454, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.651095] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.651438] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.652325] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9530098-aea6-4acd-b8f4-b73f62a02ac8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.663850] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0cd67a-f6fc-4547-89cd-c373f907413d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.672893] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6a5b68-30ac-48d7-aaec-65ec0cf1765e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.718033] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ac86b1-197b-4d4a-a41e-609749c9079c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.728434] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93a142c-3734-4ee0-b4c2-da760c0b6421 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.747396] env[62974]: DEBUG nova.compute.provider_tree [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.751754] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.751754] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.751754] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleting the 
datastore file [datastore1] 1c7fabf7-ba82-4628-9016-b0f198add99a {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.751754] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc545f99-8242-46cf-a1b9-5d02f62468e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.759108] env[62974]: DEBUG oslo_vmware.api [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for the task: (returnval){ [ 835.759108] env[62974]: value = "task-2654456" [ 835.759108] env[62974]: _type = "Task" [ 835.759108] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.769247] env[62974]: DEBUG oslo_vmware.api [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.810429] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077739} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.811081] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.812206] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d32974-098d-4abd-91ea-610c39044969 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.838730] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/e23dbff7-d23e-4909-9b33-67ed15c325e7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.840076] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.840489] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.840751] env[62974]: DEBUG nova.network.neutron [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.843249] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d9ba6d3-8697-4d99-84a7-c1a9d22a5cbd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.865615] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 835.865615] env[62974]: value = "task-2654457" [ 835.865615] env[62974]: _type = "Task" [ 835.865615] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.880157] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654457, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.005833] env[62974]: DEBUG nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 836.041982] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 836.041982] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.041982] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.042350] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.042350] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 836.042350] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 836.042456] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 836.042609] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 836.042947] env[62974]: DEBUG nova.virt.hardware [None 
req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 836.043219] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 836.043829] env[62974]: DEBUG nova.virt.hardware [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 836.045223] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752f114f-b29d-4746-bfaf-214f05a114d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.055984] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86977a5-8cd4-4f3c-a8af-2bb6202e3c69 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.073219] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.127292] env[62974]: DEBUG oslo_vmware.api [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.358654} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.130934] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.131546] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.131752] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.132012] env[62974]: INFO nova.compute.manager [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Took 1.18 seconds to destroy the instance on the hypervisor. [ 836.133402] env[62974]: DEBUG oslo.service.loopingcall [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.136162] env[62974]: DEBUG nova.compute.manager [-] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.136281] env[62974]: DEBUG nova.network.neutron [-] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.146522] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654454, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.253374] env[62974]: DEBUG nova.scheduler.client.report [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.277314] env[62974]: DEBUG oslo_vmware.api [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Task: {'id': task-2654456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280898} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.278788] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.278935] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.279248] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.279546] env[62974]: INFO nova.compute.manager [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 836.279973] env[62974]: DEBUG oslo.service.loopingcall [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.280592] env[62974]: DEBUG nova.compute.manager [-] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.280749] env[62974]: DEBUG nova.network.neutron [-] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.382221] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654457, 'name': ReconfigVM_Task, 'duration_secs': 0.434165} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.382390] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfigured VM instance instance-00000046 to attach disk [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/e23dbff7-d23e-4909-9b33-67ed15c325e7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.383638] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30ba0af9-9995-44b6-abb9-6158cdfb346c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.393542] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 836.393542] env[62974]: value = "task-2654458" [ 836.393542] env[62974]: _type = "Task" [ 836.393542] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.404093] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654458, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.622097] env[62974]: DEBUG nova.network.neutron [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updated VIF entry in instance network info cache for port 77480bd6-dce2-44cc-9b9b-3987573c454f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 836.622525] env[62974]: DEBUG nova.network.neutron [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updating instance_info_cache with network_info: [{"id": "77480bd6-dce2-44cc-9b9b-3987573c454f", "address": "fa:16:3e:8f:dc:8c", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77480bd6-dc", "ovs_interfaceid": "77480bd6-dce2-44cc-9b9b-3987573c454f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.638381] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654454, 'name': CreateVM_Task, 'duration_secs': 0.581884} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.639434] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.641270] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.643132] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.643132] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.643132] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6ed2fc3-2480-4a27-be32-72c2b487b448 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.650022] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 836.650022] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a21967-0757-ecf3-ff4e-3fe0f7dbf92d" [ 836.650022] env[62974]: _type = "Task" [ 836.650022] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.659526] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a21967-0757-ecf3-ff4e-3fe0f7dbf92d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.757446] env[62974]: DEBUG nova.compute.manager [req-d665faf2-14ea-412f-8cbb-3d4d9b784770 req-5dbd38be-0c59-43b9-b2bd-2e11b9a24175 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Received event network-vif-plugged-5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 836.757673] env[62974]: DEBUG oslo_concurrency.lockutils [req-d665faf2-14ea-412f-8cbb-3d4d9b784770 req-5dbd38be-0c59-43b9-b2bd-2e11b9a24175 service nova] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.757884] env[62974]: DEBUG oslo_concurrency.lockutils [req-d665faf2-14ea-412f-8cbb-3d4d9b784770 req-5dbd38be-0c59-43b9-b2bd-2e11b9a24175 service nova] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.758063] env[62974]: DEBUG oslo_concurrency.lockutils [req-d665faf2-14ea-412f-8cbb-3d4d9b784770 req-5dbd38be-0c59-43b9-b2bd-2e11b9a24175 service nova] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.758278] env[62974]: DEBUG nova.compute.manager [req-d665faf2-14ea-412f-8cbb-3d4d9b784770 req-5dbd38be-0c59-43b9-b2bd-2e11b9a24175 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] No waiting events found dispatching network-vif-plugged-5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 836.758401] env[62974]: WARNING nova.compute.manager [req-d665faf2-14ea-412f-8cbb-3d4d9b784770 req-5dbd38be-0c59-43b9-b2bd-2e11b9a24175 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Received unexpected event network-vif-plugged-5ef50dc0-edb6-41e4-b27b-22e996c326b4 for instance with vm_state building and task_state spawning. 
[ 836.760814] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.790s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.761317] env[62974]: DEBUG nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.764126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.405s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.764318] env[62974]: DEBUG nova.objects.instance [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 836.847292] env[62974]: DEBUG nova.network.neutron [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance_info_cache with network_info: [{"id": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "address": "fa:16:3e:5f:6a:8c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8424609-cf", "ovs_interfaceid": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.905289] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654458, 'name': Rename_Task, 'duration_secs': 0.212921} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.905798] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.905897] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86e667cb-7669-4b39-8ea9-806772ee0a08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.913842] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 836.913842] env[62974]: value = "task-2654459" [ 836.913842] env[62974]: _type = "Task" [ 836.913842] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.923585] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654459, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.117429] env[62974]: DEBUG nova.network.neutron [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Successfully updated port: 5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.128970] env[62974]: DEBUG oslo_concurrency.lockutils [req-ac1a0d6c-c942-4500-8d22-95803edfc6e3 req-9a616a9f-747d-4954-8ad3-1e857f8b0b9a service nova] Releasing lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.162833] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a21967-0757-ecf3-ff4e-3fe0f7dbf92d, 'name': SearchDatastore_Task, 'duration_secs': 0.01789} completed successfully. 
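Editor's note: the alternating "Waiting for the task ... to complete", "progress is N%", and "completed successfully ... duration_secs" entries throughout this log come from oslo.vmware's task poller. A simplified, self-contained poll loop in the same spirit follows; get_task_info is a hypothetical callable standing in for the vSphere TaskInfo lookup, and this is not oslo_vmware's actual implementation.

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Simplified loop in the spirit of the wait_for_task/_poll_task trace above.

    `get_task_info` is a hypothetical callable returning an object with
    .state ('running'/'success'/'error'), .progress and .error attributes.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"Task {task_ref} completed successfully in {duration:.3f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(poll_interval)

class _FakeInfo:
    def __init__(self, state, progress=0, error=None):
        self.state, self.progress, self.error = state, progress, error

if __name__ == "__main__":
    # Mimics the task-2654459 PowerOnVM_Task trace: 0% -> 89% -> success.
    states = iter([_FakeInfo("running", 0), _FakeInfo("running", 89), _FakeInfo("success", 100)])
    wait_for_task(lambda ref: next(states), "task-2654459", poll_interval=0.01)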
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.163359] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.164095] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.164269] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.164493] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.164764] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.165124] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f7e153e-8d57-45e8-a95d-26b95c354aae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.185338] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.185536] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.186368] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e5b4f88-6c41-4f86-8ed8-b65f24f0da1c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.193261] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 837.193261] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523506b6-5eac-80ab-70f2-d5bb82d4c04f" [ 837.193261] env[62974]: _type = "Task" [ 837.193261] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.204686] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523506b6-5eac-80ab-70f2-d5bb82d4c04f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.270040] env[62974]: DEBUG nova.compute.utils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.275354] env[62974]: DEBUG nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 837.275449] env[62974]: DEBUG nova.network.neutron [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.314605] env[62974]: DEBUG nova.policy [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b837770f3f74a5fad99c7cc150e9cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '567f64e735384503b6c0172050bdfaf5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.354079] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.431735] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654459, 'name': PowerOnVM_Task} progress is 89%. 
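Editor's note: the "Policy check for network:attach_external_network failed" line above is routine during port creation: the requester only carries the member/reader roles (is_admin is False), so the external-attach rule is denied and the port is created as a normal tenant attach. The toy check below illustrates why such a token fails; the rule content is an assumption for the example (real deployments define it via oslo.policy rules), and this is not the oslo.policy API.

# Toy role check behind "Policy check for network:attach_external_network failed".
# The required role below is an assumption for illustration only.
RULES = {
    "network:attach_external_network": {"any_of_roles": {"admin"}},
}

def check(rule_name, credentials):
    required = RULES[rule_name]["any_of_roles"]
    return bool(required & set(credentials.get("roles", [])))

creds = {"user_id": "1b837770f3f74a5fad99c7cc150e9cde",
         "project_id": "567f64e735384503b6c0172050bdfaf5",
         "roles": ["member", "reader"]}

print(check("network:attach_external_network", creds))  # False -> the DEBUG line above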
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.512032] env[62974]: DEBUG nova.network.neutron [-] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.582147] env[62974]: DEBUG nova.network.neutron [-] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.616788] env[62974]: DEBUG nova.network.neutron [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Successfully created port: 9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.619668] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.620214] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.620214] env[62974]: DEBUG nova.network.neutron [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.704624] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523506b6-5eac-80ab-70f2-d5bb82d4c04f, 'name': SearchDatastore_Task, 'duration_secs': 0.014802} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.705466] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3364ead6-5c79-41e5-91e2-921e0a88372f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.712571] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 837.712571] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527b637d-efb3-075a-77c5-cb756b051be4" [ 837.712571] env[62974]: _type = "Task" [ 837.712571] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.721657] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527b637d-efb3-075a-77c5-cb756b051be4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.774984] env[62974]: DEBUG nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 837.780765] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2b161c52-de20-49c1-81de-c53c7678bc15 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.788201] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.296s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.789769] env[62974]: INFO nova.compute.claims [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.887476] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683a33d5-46b4-48be-ab70-20e90b915438 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.926779] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8b738c-0610-4727-8f83-84690fed1e98 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.939269] env[62974]: DEBUG oslo_vmware.api [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654459, 'name': PowerOnVM_Task, 'duration_secs': 0.552303} completed successfully. 
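Editor's note: the "Claim successful on node domain-c8..." line above is the resource tracker accepting the instance against the host's inventory while the surrounding "compute_resources" lock is held. A minimal test-and-reserve sketch of that step follows, using a plain dataclass rather than Nova's ResourceTracker; the inventory numbers are made up.

from dataclasses import dataclass

@dataclass
class HostInventory:
    node: str
    free_vcpus: int
    free_ram_mb: int

def instance_claim(inv: HostInventory, vcpus: int, ram_mb: int) -> bool:
    """Toy version of the check-and-decrement behind 'Claim successful on node ...'.

    In the log this runs with 'compute_resources' held, so the check and the
    decrement cannot interleave with a concurrent claim.
    """
    if inv.free_vcpus < vcpus or inv.free_ram_mb < ram_mb:
        return False                      # would surface as a failed claim / reschedule
    inv.free_vcpus -= vcpus
    inv.free_ram_mb -= ram_mb
    return True

inv = HostInventory("domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28", 8, 16384)
print(instance_claim(inv, 1, 192))        # True -> "Claim successful on node ..."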
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.943112] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.943450] env[62974]: INFO nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Took 8.36 seconds to spawn the instance on the hypervisor. [ 837.943680] env[62974]: DEBUG nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.944438] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance 'af370de1-e4d7-4312-bc72-c6398eeaf2ed' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 837.950860] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a25447c-2f2e-43b8-a2f6-5c13fb6f8e8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.015241] env[62974]: INFO nova.compute.manager [-] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Took 1.88 seconds to deallocate network for instance. [ 838.083488] env[62974]: INFO nova.compute.manager [-] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Took 1.80 seconds to deallocate network for instance. [ 838.157819] env[62974]: DEBUG nova.network.neutron [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.226051] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527b637d-efb3-075a-77c5-cb756b051be4, 'name': SearchDatastore_Task, 'duration_secs': 0.025656} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.226356] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.226613] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6243cce3-8611-46fa-8379-e2f3c825c4dd/6243cce3-8611-46fa-8379-e2f3c825c4dd.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.226895] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67e52c03-cb2b-4c56-b779-ff4333ca6301 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.244955] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 838.244955] env[62974]: value = "task-2654460" [ 838.244955] env[62974]: _type = "Task" [ 838.244955] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.255363] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654460, 'name': CopyVirtualDisk_Task} progress is 0%. 
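Editor's note: the sequence that started with "Processing image 807f8582-..." and ends in the CopyVirtualDisk_Task above is the fetch-if-missing image-cache flow: lock the cache entry named after the cached VMDK path, make sure the cache directory exists, check whether the datastore already holds the image, then copy it into the instance's own folder. The condensed local-filesystem stand-in below keeps only that ordering; the paths, helper names, and the "download" placeholder are illustrative, not Nova's vmops code.

import shutil
import tempfile
from pathlib import Path
from threading import Lock

_cache_locks: dict[str, Lock] = {}

def fetch_if_missing(cache_root: Path, image_id: str, instance_dir: Path) -> Path:
    cached = cache_root / image_id / f"{image_id}.vmdk"
    lock = _cache_locks.setdefault(str(cached), Lock())  # per-image lock, like the log's lock name
    with lock:
        cache_root.mkdir(parents=True, exist_ok=True)     # "Creating directory ... devstack-image-cache_base"
        if not cached.exists():                            # rough equivalent of the SearchDatastore_Task check
            cached.parent.mkdir(parents=True, exist_ok=True)
            cached.write_bytes(b"fake image bits")         # real code would fetch the image here
        instance_dir.mkdir(parents=True, exist_ok=True)
        target = instance_dir / f"{instance_dir.name}.vmdk"
        shutil.copy(cached, target)                        # "Copying Virtual Disk ... to [datastore1] <uuid>/<uuid>.vmdk"
        return target

if __name__ == "__main__":
    root = Path(tempfile.mkdtemp())
    print(fetch_if_missing(root / "devstack-image-cache_base",
                           "807f8582-499f-47ee-9d5b-755c9f39bc39",
                           root / "6243cce3-8611-46fa-8379-e2f3c825c4dd"))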
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.296894] env[62974]: DEBUG nova.compute.manager [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.304587] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351eda77-96f5-4ce7-b9d3-fd8a9d9d80cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.358297] env[62974]: DEBUG nova.network.neutron [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating instance_info_cache with network_info: [{"id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "address": "fa:16:3e:9f:c5:d4", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ef50dc0-ed", "ovs_interfaceid": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.458263] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.458737] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4fe9c63-1b8b-4c24-bf37-acaad417edfe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.474071] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 838.474071] env[62974]: value = "task-2654461" [ 838.474071] env[62974]: _type = "Task" [ 838.474071] env[62974]: } to complete. 
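Editor's note: the network_info blobs repeated in these cache updates share one shape: a list of VIFs, each with an "address" (MAC) and a nested network.subnets[].ips[] list of fixed addresses. The helper below pulls MAC and fixed IPs out of that plain list-of-dicts form as it appears in the log; it does not use Nova's NetworkInfo objects, and the trimmed example data is copied from the entry above.

def summarize_network_info(network_info):
    """Extract (vif_id, mac, [fixed ips]) tuples from a network_info list
    shaped like the instance_info_cache updates above."""
    summary = []
    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip.get("type") == "fixed"]
        summary.append((vif["id"], vif["address"], fixed))
    return summary

example = [{"id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4",
            "address": "fa:16:3e:9f:c5:d4",
            "network": {"subnets": [{"ips": [{"address": "192.168.128.6",
                                              "type": "fixed"}]}]}}]
print(summarize_network_info(example))
# [('5ef50dc0-edb6-41e4-b27b-22e996c326b4', 'fa:16:3e:9f:c5:d4', ['192.168.128.6'])]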
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.481219] env[62974]: INFO nova.compute.manager [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Took 46.31 seconds to build instance. [ 838.493790] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654461, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.525608] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.591498] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.760017] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512871} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.760017] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6243cce3-8611-46fa-8379-e2f3c825c4dd/6243cce3-8611-46fa-8379-e2f3c825c4dd.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.760017] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.760017] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fa51fe2-557b-4de8-8151-f9c24f35e52b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.769370] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 838.769370] env[62974]: value = "task-2654462" [ 838.769370] env[62974]: _type = "Task" [ 838.769370] env[62974]: } to complete. 
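Editor's note: the target of "Extending root virtual disk to 1048576" above is the root disk size expressed in KiB, assuming the same 1 GiB root disk as the m1.nano flavor (root_gb=1) dumped just below; the arithmetic is simply:

root_gb = 1                          # flavor root disk in GiB (see root_gb=1 in the flavor below)
size_in_kb = root_gb * 1024 * 1024   # GiB -> KiB
print(size_in_kb)                    # 1048576, matching "Extending root virtual disk to 1048576"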
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.780558] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.791103] env[62974]: DEBUG nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.821392] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.821636] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.821820] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.822128] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.822296] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.822465] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.822689] env[62974]: DEBUG nova.virt.hardware 
[None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.822846] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.823038] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.823260] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.823446] env[62974]: DEBUG nova.virt.hardware [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.824511] env[62974]: INFO nova.compute.manager [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] instance snapshotting [ 838.827593] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31ad298-bfd7-494d-ad50-5288a281dd65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.831820] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebdb6cd-a822-4416-bc44-e6c56b0495d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.859993] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f535d1-fdd4-4761-aed7-69bf51671ff2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.866176] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.866485] env[62974]: DEBUG nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Instance network_info: |[{"id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "address": "fa:16:3e:9f:c5:d4", "network": {"id": 
"af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ef50dc0-ed", "ovs_interfaceid": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 838.867420] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e624d1b-5ff9-4032-9dd2-36a21538e428 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.870526] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:c5:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ef50dc0-edb6-41e4-b27b-22e996c326b4', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.879335] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Creating folder: Project (f4e071d2e6ef4b928dd40ea5b8f81fe6). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.880247] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b6e4807-42bb-4293-a642-9a9212fb7b9f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.906472] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Created folder: Project (f4e071d2e6ef4b928dd40ea5b8f81fe6) in parent group-v535199. [ 838.907129] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Creating folder: Instances. Parent ref: group-v535395. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.907271] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ead5e090-dc14-442b-9193-9d641206abce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.921536] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Created folder: Instances in parent group-v535395. [ 838.921950] env[62974]: DEBUG oslo.service.loopingcall [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.922285] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.922693] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-075d9284-97e3-4725-b892-fabf2b0477de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.950620] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.950620] env[62974]: value = "task-2654465" [ 838.950620] env[62974]: _type = "Task" [ 838.950620] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.962064] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654465, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.984503] env[62974]: DEBUG oslo_concurrency.lockutils [None req-38f00636-7eeb-47e8-881f-ab6f407fb2a7 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.152s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.991450] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654461, 'name': PowerOnVM_Task} progress is 100%. 
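Editor's note: a few entries back, the nova.virt.hardware trace walks from flavor and image limits ("Flavor limits 0:0:0", i.e. unset) to "Got 1 possible topologies ... [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for a single vCPU. The sketch below enumerates (sockets, cores, threads) splits whose product equals the vCPU count, bounded only by the hard maxima shown in the log; it deliberately omits the preference-sorting step and is a simplification, not Nova's _get_possible_cpu_topologies.

import itertools  # noqa: F401  (kept for readers who want to extend the enumeration)
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "cores sockets threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate splits whose product equals vcpus, in the spirit of the
    'Build topologies ... Got N possible topologies' trace above."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= max_threads:
                topos.append(VirtCPUTopology(cores=cores, sockets=sockets, threads=threads))
    return topos

print(possible_topologies(1))   # [VirtCPUTopology(cores=1, sockets=1, threads=1)] -- the log's single result
print(possible_topologies(4))   # several splits, e.g. 1x4, 2x2, 4x1 across sockets/cores/threads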
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.103139] env[62974]: DEBUG nova.compute.manager [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Received event network-vif-deleted-a4dcb530-120c-4113-98f3-faa4e893012d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 839.103258] env[62974]: DEBUG nova.compute.manager [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Received event network-vif-deleted-cc08e424-4db8-4277-a8b3-2b77913828ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 839.103376] env[62974]: DEBUG nova.compute.manager [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Received event network-changed-5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 839.103541] env[62974]: DEBUG nova.compute.manager [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Refreshing instance network info cache due to event network-changed-5ef50dc0-edb6-41e4-b27b-22e996c326b4. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 839.103785] env[62974]: DEBUG oslo_concurrency.lockutils [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] Acquiring lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.103925] env[62974]: DEBUG oslo_concurrency.lockutils [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] Acquired lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.104115] env[62974]: DEBUG nova.network.neutron [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Refreshing network info cache for port 5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.187128] env[62974]: DEBUG nova.network.neutron [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Successfully updated port: 9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.219895] env[62974]: DEBUG nova.compute.manager [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Received event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 839.220255] env[62974]: DEBUG nova.compute.manager [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing instance network info cache due to 
event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 839.220483] env[62974]: DEBUG oslo_concurrency.lockutils [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] Acquiring lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.220627] env[62974]: DEBUG oslo_concurrency.lockutils [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] Acquired lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.220784] env[62974]: DEBUG nova.network.neutron [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.281910] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070623} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.282585] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.285791] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3479235-ca3b-4f38-8306-6842e58dbb9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.312859] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 6243cce3-8611-46fa-8379-e2f3c825c4dd/6243cce3-8611-46fa-8379-e2f3c825c4dd.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.315844] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b019c794-1c59-49a5-8144-76d8a288ae13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.338420] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 839.338420] env[62974]: value = "task-2654466" [ 839.338420] env[62974]: _type = "Task" [ 839.338420] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.347599] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654466, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.400424] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 839.400899] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-27d7939d-505d-49f7-b9a4-ebcf7a17aaec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.409478] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 839.409478] env[62974]: value = "task-2654467" [ 839.409478] env[62974]: _type = "Task" [ 839.409478] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.422269] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654467, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.460024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20460b2c-f6b8-47d5-8f7a-8d4c6925c881 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.463095] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654465, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.468805] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8f4bcd-be89-4ae8-b2d3-3a1eeafabba5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.505267] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d52406e-7a6f-4b86-99e6-2aa537bf735e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.515976] env[62974]: DEBUG oslo_vmware.api [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654461, 'name': PowerOnVM_Task, 'duration_secs': 0.590048} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.518262] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.518481] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9ecaeda3-86d2-4b52-8cf2-379e4cec3eab tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance 'af370de1-e4d7-4312-bc72-c6398eeaf2ed' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 839.523648] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ec0701-71cc-4687-9eff-b378a540468b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.540196] env[62974]: DEBUG nova.compute.provider_tree [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.690084] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.690275] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.690481] env[62974]: DEBUG nova.network.neutron [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 839.852250] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654466, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.893218] env[62974]: DEBUG nova.network.neutron [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updated VIF entry in instance network info cache for port 5ef50dc0-edb6-41e4-b27b-22e996c326b4. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.893588] env[62974]: DEBUG nova.network.neutron [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating instance_info_cache with network_info: [{"id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "address": "fa:16:3e:9f:c5:d4", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ef50dc0-ed", "ovs_interfaceid": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.921111] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654467, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.967582] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654465, 'name': CreateVM_Task, 'duration_secs': 0.746499} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.967892] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.968871] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.969065] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.969535] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.969907] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-190d8224-611a-4a1e-b314-e9f9148a9713 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.977183] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 839.977183] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52848960-bd2d-52e7-225a-f2d3429e65c9" [ 839.977183] env[62974]: _type = "Task" [ 839.977183] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.995810] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52848960-bd2d-52e7-225a-f2d3429e65c9, 'name': SearchDatastore_Task, 'duration_secs': 0.013601} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.996341] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.996736] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.997261] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.997390] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.997679] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.998454] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5462170e-7e56-4adf-ae1a-3084e50ced26 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.024315] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.024517] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.025335] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d210fd6-6301-4365-833f-37d7485f069b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.028853] env[62974]: DEBUG nova.network.neutron [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updated VIF entry in instance network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.029264] env[62974]: DEBUG nova.network.neutron [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f4c134a-f0", "ovs_interfaceid": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.039586] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 840.039586] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5285d860-d2a8-d7f8-912a-505f2aee4381" [ 840.039586] env[62974]: _type = "Task" [ 840.039586] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.045318] env[62974]: DEBUG nova.scheduler.client.report [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.057157] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5285d860-d2a8-d7f8-912a-505f2aee4381, 'name': SearchDatastore_Task, 'duration_secs': 0.012813} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.058194] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a98049d0-d2df-46de-bc2b-b8658137b3ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.065960] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 840.065960] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527f9fc9-029f-cd2f-441a-7d390ee1fd0f" [ 840.065960] env[62974]: _type = "Task" [ 840.065960] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.076027] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527f9fc9-029f-cd2f-441a-7d390ee1fd0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.227202] env[62974]: DEBUG nova.network.neutron [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.352053] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654466, 'name': ReconfigVM_Task, 'duration_secs': 0.755742} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.352371] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 6243cce3-8611-46fa-8379-e2f3c825c4dd/6243cce3-8611-46fa-8379-e2f3c825c4dd.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.353024] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b6ade30-ffb7-4368-ac21-a91a259f13ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.363168] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 840.363168] env[62974]: value = "task-2654468" [ 840.363168] env[62974]: _type = "Task" [ 840.363168] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.370974] env[62974]: DEBUG nova.network.neutron [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.380121] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654468, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.396383] env[62974]: DEBUG oslo_concurrency.lockutils [req-3d503394-37fa-4f87-88c4-1e77292395ea req-3b6b2d51-dd84-4c83-988c-c6ab733e6510 service nova] Releasing lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.422788] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654467, 'name': CreateSnapshot_Task, 'duration_secs': 1.002431} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.423204] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 840.424016] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6793741-5e4a-4547-b006-e8df522beafe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.534811] env[62974]: DEBUG oslo_concurrency.lockutils [req-a6e3d693-13ee-4b10-b738-58ca490470e5 req-d2e53423-7941-4594-b907-b43f3c53cd41 service nova] Releasing lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.551557] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.552111] env[62974]: DEBUG nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 840.554774] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.049s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.554986] env[62974]: DEBUG nova.objects.instance [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lazy-loading 'resources' on Instance uuid 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 840.579076] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527f9fc9-029f-cd2f-441a-7d390ee1fd0f, 'name': SearchDatastore_Task, 'duration_secs': 0.012706} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.579841] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.579841] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c1d0b90c-aa1c-485d-850d-a1495feac7c9/c1d0b90c-aa1c-485d-850d-a1495feac7c9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.579841] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5398260-acf5-4da7-809d-4c9b2470a16d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.590581] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 840.590581] env[62974]: value = "task-2654469" [ 840.590581] env[62974]: _type = "Task" [ 840.590581] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.601995] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654469, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.874259] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.874575] env[62974]: DEBUG nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Instance network_info: |[{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 840.874872] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654468, 'name': Rename_Task, 'duration_secs': 0.285903} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.875782] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:64:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a104751-f775-4505-a6de-a82f22b2127c', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.884250] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Creating folder: Project (567f64e735384503b6c0172050bdfaf5). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.884584] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.884863] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b133fdb-c75e-4f28-993d-b63816b80253 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.886817] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eec374d9-6fb1-4b02-a8b3-edfe8fa45731 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.898732] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 840.898732] env[62974]: value = "task-2654470" [ 840.898732] env[62974]: _type = "Task" [ 840.898732] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.903832] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Created folder: Project (567f64e735384503b6c0172050bdfaf5) in parent group-v535199. [ 840.903977] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Creating folder: Instances. Parent ref: group-v535399. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.904629] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8c38968-72f7-4c82-a510-230def40f221 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.909404] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654470, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.919246] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Created folder: Instances in parent group-v535399. [ 840.919495] env[62974]: DEBUG oslo.service.loopingcall [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.920104] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.920377] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66b55b69-c2c0-4a1b-922f-f64091a0fbff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.943822] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 840.944585] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-626b8ae9-28c8-4693-aead-a496398a29a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.956073] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.956073] env[62974]: value = "task-2654474" [ 840.956073] env[62974]: _type = "Task" [ 840.956073] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.956454] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 840.956454] env[62974]: value = "task-2654473" [ 840.956454] env[62974]: _type = "Task" [ 840.956454] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.971198] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654473, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.974717] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654474, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.061480] env[62974]: DEBUG nova.compute.utils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.065968] env[62974]: DEBUG nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 841.066432] env[62974]: DEBUG nova.network.neutron [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.109608] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654469, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.132692] env[62974]: DEBUG nova.compute.manager [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Received event network-vif-plugged-9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 841.132942] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.135020] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] Lock "514e0f15-f27d-4fab-9107-b92884075420-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.135020] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] Lock "514e0f15-f27d-4fab-9107-b92884075420-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.135020] env[62974]: DEBUG nova.compute.manager [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] No waiting events found dispatching network-vif-plugged-9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 841.135020] env[62974]: WARNING nova.compute.manager [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Received unexpected event network-vif-plugged-9a104751-f775-4505-a6de-a82f22b2127c for instance with vm_state building and task_state spawning. 
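The repeated "Acquiring lock ... / acquired ... / released" DEBUG lines above (for example the per-instance "514e0f15-f27d-4fab-9107-b92884075420-events" lock) are emitted by oslo.concurrency's lockutils. A minimal sketch of that locking pattern, with a placeholder critical section and not Nova's actual code, looks like:

    from oslo_concurrency import lockutils

    # Context-manager form: logs "Acquiring lock"/"Releasing lock" at DEBUG,
    # using the lock name seen in the log above.
    with lockutils.lock("514e0f15-f27d-4fab-9107-b92884075420-events"):
        pass  # critical section, e.g. popping a pending instance event

    # Decorator form behind the '... acquired by ... :: waited N.NNNs' lines.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass  # placeholder body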
[ 841.135020] env[62974]: DEBUG nova.compute.manager [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Received event network-changed-9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 841.135428] env[62974]: DEBUG nova.compute.manager [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Refreshing instance network info cache due to event network-changed-9a104751-f775-4505-a6de-a82f22b2127c. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 841.135428] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.135428] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.135428] env[62974]: DEBUG nova.network.neutron [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Refreshing network info cache for port 9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.137824] env[62974]: DEBUG nova.policy [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49d8e3a243d346e8969ba6f325e7787e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9087d01b1ad748e0a66474953dfe7034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 841.292790] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.292790] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.292790] env[62974]: DEBUG nova.compute.manager [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 
tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Going to confirm migration 3 {{(pid=62974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 841.415105] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654470, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.474099] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654473, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.479426] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654474, 'name': CreateVM_Task, 'duration_secs': 0.506855} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.480792] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 841.482185] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.482330] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.482676] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.482948] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5cf7437-01f1-4137-80d2-688c52f9025e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.489865] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 841.489865] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d8ad4f-5314-dea7-f855-ee821006a1eb" [ 841.489865] env[62974]: _type = "Task" [ 841.489865] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.502434] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d8ad4f-5314-dea7-f855-ee821006a1eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.521292] env[62974]: DEBUG nova.network.neutron [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Successfully created port: 1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.567487] env[62974]: DEBUG nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 841.605504] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744895} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.608664] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c1d0b90c-aa1c-485d-850d-a1495feac7c9/c1d0b90c-aa1c-485d-850d-a1495feac7c9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.608945] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.609538] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46860c3d-19a5-42ff-abd5-40c891c44cca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.618848] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 841.618848] env[62974]: value = "task-2654475" [ 841.618848] env[62974]: _type = "Task" [ 841.618848] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.636121] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654475, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.665120] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374a0ce1-f130-422a-bd23-c4ad20d2be38 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.674436] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13da8eba-913d-438c-83a5-cee36d3decaf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.713701] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c56586-c1fb-468e-98ae-6e54453f4d74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.723825] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb737e82-87ea-46b5-9624-897bbbb2a4da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.747710] env[62974]: DEBUG nova.compute.provider_tree [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.910833] env[62974]: DEBUG oslo_vmware.api [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654470, 'name': PowerOnVM_Task, 'duration_secs': 0.945551} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.911171] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.911401] env[62974]: INFO nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Took 8.62 seconds to spawn the instance on the hypervisor. 
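The "Waiting for the task: (returnval){ ... } to complete" blocks and the "progress is N%" poll lines above come from oslo.vmware's task polling. A rough sketch of that call pattern, with a hypothetical vCenter endpoint and credentials and not the driver's actual code:

    from oslo_vmware import api

    def wait_for_vcenter_task(task_ref):
        """Block until a vCenter Task reaches a terminal state, logging progress."""
        session = api.VMwareAPISession(
            "vc.example.test", "admin", "secret",   # hypothetical endpoint/credentials
            api_retry_count=10, task_poll_interval=0.5)
        # Polls the task (the 'progress is N%' lines) and raises if it errors out.
        return session.wait_for_task(task_ref)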
[ 841.911585] env[62974]: DEBUG nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 841.912529] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfea0cc-a52c-4776-a2fb-746d1ac586a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.952368] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.952368] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.952368] env[62974]: DEBUG nova.network.neutron [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.952368] env[62974]: DEBUG nova.objects.instance [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lazy-loading 'info_cache' on Instance uuid af370de1-e4d7-4312-bc72-c6398eeaf2ed {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 841.973802] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654473, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.008158] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d8ad4f-5314-dea7-f855-ee821006a1eb, 'name': SearchDatastore_Task, 'duration_secs': 0.015196} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.008158] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.008158] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.008158] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.008593] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.008593] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.008808] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-799cf867-896d-45a0-a4d2-e029e67f0b88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.020129] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.020406] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.021291] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ea58a5-c3b7-45c8-b85f-906586885b03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.029162] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 842.029162] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b14d07-db25-d2ce-bafd-f26dded65ae7" [ 842.029162] env[62974]: _type = "Task" [ 842.029162] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.039505] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b14d07-db25-d2ce-bafd-f26dded65ae7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.042337] env[62974]: DEBUG nova.network.neutron [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updated VIF entry in instance network info cache for port 9a104751-f775-4505-a6de-a82f22b2127c. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.042676] env[62974]: DEBUG nova.network.neutron [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.131946] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654475, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.390257} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.132275] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.133148] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e71ef32-2622-4c5c-8891-982ffcc1f883 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.158166] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] c1d0b90c-aa1c-485d-850d-a1495feac7c9/c1d0b90c-aa1c-485d-850d-a1495feac7c9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.158519] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1c43777-b6ca-4bed-9c0d-5225aec42784 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.181530] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 842.181530] env[62974]: value = "task-2654476" [ 842.181530] env[62974]: _type = "Task" [ 842.181530] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.190652] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654476, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.253042] env[62974]: DEBUG nova.scheduler.client.report [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.435733] env[62974]: INFO nova.compute.manager [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Took 42.86 seconds to build instance. 
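The "Inventory has not changed for provider ..." entries above include the provider's full inventory dict; allocatable capacity per resource class follows the usual placement formula (total - reserved) * allocation_ratio. A small illustration using only the fields needed for that calculation, with the values logged above:

    # Inventory as logged for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18
    # (min_unit/max_unit/step_size omitted; they bound request sizes, not capacity).
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g} allocatable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400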
[ 842.472032] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654473, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.540840] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b14d07-db25-d2ce-bafd-f26dded65ae7, 'name': SearchDatastore_Task, 'duration_secs': 0.022221} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.541744] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a27d700-2671-4368-ab79-82319f138535 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.545745] env[62974]: DEBUG oslo_concurrency.lockutils [req-8bc33a28-5413-4315-abfe-589f21b52947 req-b2379774-aeac-40db-aeca-3d5d33ef418a service nova] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.547542] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 842.547542] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5272f8e6-7124-7ec9-db63-71a62a3d869c" [ 842.547542] env[62974]: _type = "Task" [ 842.547542] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.555969] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5272f8e6-7124-7ec9-db63-71a62a3d869c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.580476] env[62974]: DEBUG nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 842.608155] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 842.608457] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.608704] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 842.608922] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 842.609188] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 842.609440] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 842.609777] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 842.610048] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 842.610251] env[62974]: DEBUG 
nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 842.610421] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 842.610596] env[62974]: DEBUG nova.virt.hardware [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 842.611520] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69f334b-8d93-4518-9777-56b8ca71fb85 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.620445] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f222f5f6-d503-44bf-8bbd-943307bc6350 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.695785] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654476, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.762605] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.763640] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.488s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.764069] env[62974]: DEBUG nova.objects.instance [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lazy-loading 'resources' on Instance uuid 6c7401b6-a69f-4de3-aeb9-26c727d57b76 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.785565] env[62974]: INFO nova.scheduler.client.report [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted allocations for instance 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b [ 842.937681] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1eadee79-127d-40a2-89c8-dd083ca1f3aa tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.463s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.978383] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654473, 'name': CloneVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.060249] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5272f8e6-7124-7ec9-db63-71a62a3d869c, 'name': SearchDatastore_Task, 'duration_secs': 0.019729} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.060249] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.060249] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 514e0f15-f27d-4fab-9107-b92884075420/514e0f15-f27d-4fab-9107-b92884075420.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.060249] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0df6f804-5df5-46ed-88ad-060e0fb53b78 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.067816] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 843.067816] env[62974]: value = "task-2654477" [ 843.067816] env[62974]: _type = "Task" [ 843.067816] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.077581] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654477, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.197670] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654476, 'name': ReconfigVM_Task, 'duration_secs': 0.791336} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.198536] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Reconfigured VM instance instance-00000048 to attach disk [datastore1] c1d0b90c-aa1c-485d-850d-a1495feac7c9/c1d0b90c-aa1c-485d-850d-a1495feac7c9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.198801] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1eadc027-9b7e-4e50-9578-93cc2e86d856 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.207723] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 843.207723] env[62974]: value = "task-2654478" [ 843.207723] env[62974]: _type = "Task" [ 843.207723] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.219518] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654478, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.297102] env[62974]: DEBUG oslo_concurrency.lockutils [None req-238ccac4-1274-4696-abea-2a4fa31c9289 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "6cee3cf6-2105-40f7-b7f2-5bd38a01a08b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.237s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.310193] env[62974]: DEBUG nova.network.neutron [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance_info_cache with network_info: [{"id": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "address": "fa:16:3e:5f:6a:8c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8424609-cf", "ovs_interfaceid": "f8424609-cf9e-4474-a78b-3d28dbdd7cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.313543] env[62974]: DEBUG nova.compute.manager [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Received event network-changed-77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 843.313834] env[62974]: DEBUG nova.compute.manager [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Refreshing instance network info cache due to event network-changed-77480bd6-dce2-44cc-9b9b-3987573c454f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 843.315239] env[62974]: DEBUG oslo_concurrency.lockutils [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] Acquiring lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.316018] env[62974]: DEBUG oslo_concurrency.lockutils [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] Acquired lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.316018] env[62974]: DEBUG nova.network.neutron [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Refreshing network info cache for port 77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 843.391923] env[62974]: DEBUG nova.network.neutron [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Successfully updated port: 1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 843.473849] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654473, 'name': CloneVM_Task, 'duration_secs': 2.027027} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.474230] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Created linked-clone VM from snapshot [ 843.475286] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978d4f57-36fa-49c4-994c-cbd66a4ef1f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.489543] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Uploading image 365438d4-9deb-4702-9499-47d85b80e3d3 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 843.499848] env[62974]: DEBUG nova.compute.manager [req-0a4887cd-c518-423f-a9a9-d242202bab7f req-5d7176e6-7202-4ca1-8ea8-9f23a3c98a21 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Received event network-vif-plugged-1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 843.500091] env[62974]: DEBUG oslo_concurrency.lockutils [req-0a4887cd-c518-423f-a9a9-d242202bab7f req-5d7176e6-7202-4ca1-8ea8-9f23a3c98a21 service nova] Acquiring lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.500322] env[62974]: DEBUG oslo_concurrency.lockutils [req-0a4887cd-c518-423f-a9a9-d242202bab7f req-5d7176e6-7202-4ca1-8ea8-9f23a3c98a21 service nova] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.501280] env[62974]: DEBUG oslo_concurrency.lockutils [req-0a4887cd-c518-423f-a9a9-d242202bab7f req-5d7176e6-7202-4ca1-8ea8-9f23a3c98a21 service nova] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.501280] env[62974]: DEBUG nova.compute.manager [req-0a4887cd-c518-423f-a9a9-d242202bab7f req-5d7176e6-7202-4ca1-8ea8-9f23a3c98a21 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] No waiting events found dispatching network-vif-plugged-1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 843.501280] env[62974]: WARNING nova.compute.manager [req-0a4887cd-c518-423f-a9a9-d242202bab7f req-5d7176e6-7202-4ca1-8ea8-9f23a3c98a21 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Received unexpected event network-vif-plugged-1461ee04-30d1-4afa-b41b-26e9ea0dc08f for instance with vm_state building and task_state spawning. 
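[editor's note] The recurring "Waiting for the task ... progress is N% ... completed successfully" entries (task-2654476 through task-2654479 above) reflect a poll-until-complete loop around vCenter tasks. The sketch below is a generic illustration of that pattern only; poll_task_progress and the state names are hypothetical placeholders, not the oslo.vmware API.

    import time

    # Generic poll-until-done loop of the kind reflected in the
    # "Task: {...} progress is N%" entries. poll_task_progress() is a
    # hypothetical stand-in for a vCenter task-info query.
    def wait_for_task(poll_task_progress, interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll_task_progress()   # e.g. ("running", 14)
            print(f"progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)                     # back off before re-polling
        raise TimeoutError("task did not complete in time")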
[ 843.511369] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 843.512142] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4e1d8e40-1483-4a8f-8d34-a426ff6668e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.524577] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 843.524577] env[62974]: value = "task-2654479" [ 843.524577] env[62974]: _type = "Task" [ 843.524577] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.535470] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654479, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.580505] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654477, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.586983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "da43a464-ebae-4038-9f7b-330df22d8d7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.586983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.586983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "da43a464-ebae-4038-9f7b-330df22d8d7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.586983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.587164] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.589053] env[62974]: INFO nova.compute.manager [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Terminating instance [ 843.722261] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654478, 'name': Rename_Task, 'duration_secs': 0.301567} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.722638] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.722963] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8125e290-7d0c-44c1-91ea-ead6c0f4dc17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.735105] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 843.735105] env[62974]: value = "task-2654480" [ 843.735105] env[62974]: _type = "Task" [ 843.735105] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.748237] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654480, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.815863] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aeb7507-33a3-427e-8aef-bb1119429555 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.820147] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-af370de1-e4d7-4312-bc72-c6398eeaf2ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.820479] env[62974]: DEBUG nova.objects.instance [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lazy-loading 'migration_context' on Instance uuid af370de1-e4d7-4312-bc72-c6398eeaf2ed {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.833278] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b72cdf4-ccbf-49f1-a0ce-ac65ebdde4bc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.874711] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2e3ea7-2de3-44cb-85ae-52fddf9ad658 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.883903] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d95d663-10fc-4358-8ef5-ee57c510c930 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.901591] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.901750] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.901899] env[62974]: DEBUG nova.network.neutron [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.903820] env[62974]: DEBUG nova.compute.provider_tree [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.036818] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 
tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654479, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.082119] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654477, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.76716} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.083121] env[62974]: DEBUG nova.network.neutron [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updated VIF entry in instance network info cache for port 77480bd6-dce2-44cc-9b9b-3987573c454f. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.083510] env[62974]: DEBUG nova.network.neutron [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updating instance_info_cache with network_info: [{"id": "77480bd6-dce2-44cc-9b9b-3987573c454f", "address": "fa:16:3e:8f:dc:8c", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77480bd6-dc", "ovs_interfaceid": "77480bd6-dce2-44cc-9b9b-3987573c454f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.086037] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 514e0f15-f27d-4fab-9107-b92884075420/514e0f15-f27d-4fab-9107-b92884075420.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.086037] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.086037] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-882b5b4c-b79d-4d73-8730-10b9b3be1106 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.095692] env[62974]: DEBUG nova.compute.manager [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 844.095929] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.096532] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 844.096532] env[62974]: value = "task-2654481" [ 844.096532] env[62974]: _type = "Task" [ 844.096532] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.097667] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90aa96de-30d3-4ee0-af4e-fca3916aa2aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.111064] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654481, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.113334] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.113599] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2405f226-8c54-4fcb-9a6c-959145c43c90 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.122275] env[62974]: DEBUG oslo_vmware.api [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 844.122275] env[62974]: value = "task-2654482" [ 844.122275] env[62974]: _type = "Task" [ 844.122275] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.133888] env[62974]: DEBUG oslo_vmware.api [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654482, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.136552] env[62974]: DEBUG oslo_concurrency.lockutils [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.136883] env[62974]: DEBUG oslo_concurrency.lockutils [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.137234] env[62974]: INFO nova.compute.manager [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Rebooting instance [ 844.246744] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654480, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.326618] env[62974]: DEBUG nova.objects.base [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 844.327624] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b38c65-fbb3-47a4-9941-87da1923440d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.350677] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b4efd03-c16c-4e3a-ab3b-32106b8a613d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.358805] env[62974]: DEBUG oslo_vmware.api [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 844.358805] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bcb325-6de6-42fa-4e14-ac83392aca41" [ 844.358805] env[62974]: _type = "Task" [ 844.358805] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.369147] env[62974]: DEBUG oslo_vmware.api [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bcb325-6de6-42fa-4e14-ac83392aca41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.407037] env[62974]: DEBUG nova.scheduler.client.report [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.446905] env[62974]: DEBUG nova.network.neutron [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.540776] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654479, 'name': Destroy_Task, 'duration_secs': 0.592799} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.541090] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Destroyed the VM [ 844.541346] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 844.541666] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-48f902c7-0b40-49d6-a536-9d40bcad7c18 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.549885] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 844.549885] env[62974]: value = "task-2654483" [ 844.549885] env[62974]: _type = "Task" [ 844.549885] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.561465] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654483, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.585885] env[62974]: DEBUG oslo_concurrency.lockutils [req-9d86742e-afc5-4b28-8105-d6d1c944b949 req-b62932b1-57a3-40f0-9cb4-f27cb21d8837 service nova] Releasing lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.612589] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654481, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11146} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.612887] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.613894] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86031a71-033e-4aaf-9ac7-13d7c68aafa9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.617576] env[62974]: DEBUG nova.network.neutron [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updating instance_info_cache with network_info: [{"id": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "address": "fa:16:3e:90:ba:22", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1461ee04-30", "ovs_interfaceid": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.639651] env[62974]: DEBUG oslo_vmware.api [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654482, 'name': PowerOffVM_Task, 'duration_secs': 0.397037} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.652126] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.652382] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.662025] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 514e0f15-f27d-4fab-9107-b92884075420/514e0f15-f27d-4fab-9107-b92884075420.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.662677] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-302d95e2-ad17-47b4-8222-22a4a248b2a2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.664534] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-530970d6-67b5-48c6-a7b7-c677af6e70a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.681871] env[62974]: DEBUG oslo_concurrency.lockutils [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.682120] env[62974]: DEBUG oslo_concurrency.lockutils [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquired lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.682301] env[62974]: DEBUG nova.network.neutron [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.688346] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 844.688346] env[62974]: value = "task-2654485" [ 844.688346] env[62974]: _type = "Task" [ 844.688346] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.700644] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654485, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.747052] env[62974]: DEBUG oslo_vmware.api [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654480, 'name': PowerOnVM_Task, 'duration_secs': 0.820187} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.747262] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.747523] env[62974]: INFO nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Took 8.74 seconds to spawn the instance on the hypervisor. [ 844.747777] env[62974]: DEBUG nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.748640] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb311c59-5b02-4c67-946a-c1dbbe013db5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.766110] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.766344] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.766529] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleting the datastore file [datastore2] da43a464-ebae-4038-9f7b-330df22d8d7c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.766826] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd2e2d49-86e3-4fd3-b440-2d0b8c62901b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.774214] env[62974]: DEBUG oslo_vmware.api [None 
req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 844.774214] env[62974]: value = "task-2654486" [ 844.774214] env[62974]: _type = "Task" [ 844.774214] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.784045] env[62974]: DEBUG oslo_vmware.api [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.847723] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f60484-fc75-053a-4cc9-27941832de3a/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 844.849152] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ceeb6f4-e317-4405-8c2e-5382e339a05f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.858241] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f60484-fc75-053a-4cc9-27941832de3a/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 844.858415] env[62974]: ERROR oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f60484-fc75-053a-4cc9-27941832de3a/disk-0.vmdk due to incomplete transfer. [ 844.858684] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-eab42129-96e5-428d-b730-4d2f41ee6a68 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.870662] env[62974]: DEBUG oslo_vmware.api [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bcb325-6de6-42fa-4e14-ac83392aca41, 'name': SearchDatastore_Task, 'duration_secs': 0.023219} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.870799] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.882045] env[62974]: DEBUG oslo_vmware.rw_handles [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f60484-fc75-053a-4cc9-27941832de3a/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 844.882231] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Uploaded image dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 844.884348] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 844.884639] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-992dec66-dd1d-4b19-9ec0-8acbf823fde4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.892306] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 844.892306] env[62974]: value = "task-2654487" [ 844.892306] env[62974]: _type = "Task" [ 844.892306] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.901319] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654487, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.916031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.920275] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.817s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.920539] env[62974]: DEBUG nova.objects.instance [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lazy-loading 'resources' on Instance uuid 366b5816-a847-48d1-ad03-5758e473a9d0 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.942930] env[62974]: INFO nova.scheduler.client.report [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Deleted allocations for instance 6c7401b6-a69f-4de3-aeb9-26c727d57b76 [ 845.065470] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654483, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.121018] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.122458] env[62974]: DEBUG nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Instance network_info: |[{"id": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "address": "fa:16:3e:90:ba:22", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1461ee04-30", "ovs_interfaceid": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 845.122563] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:ba:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1461ee04-30d1-4afa-b41b-26e9ea0dc08f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.130075] env[62974]: DEBUG oslo.service.loopingcall [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.130412] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.130719] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60cce60b-08cf-4781-9d16-870bfe591ee7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.151971] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.151971] env[62974]: value = "task-2654488" [ 845.151971] env[62974]: _type = "Task" [ 845.151971] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.160524] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654488, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.201188] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654485, 'name': ReconfigVM_Task, 'duration_secs': 0.354384} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.201550] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 514e0f15-f27d-4fab-9107-b92884075420/514e0f15-f27d-4fab-9107-b92884075420.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.202283] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c914cc03-d5ee-4703-b489-b28bab4482b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.211078] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 845.211078] env[62974]: value = "task-2654489" [ 845.211078] env[62974]: _type = "Task" [ 845.211078] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.221676] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654489, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.271624] env[62974]: INFO nova.compute.manager [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Took 39.23 seconds to build instance. 
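The wait_for_task/_poll_task pairs above (task-2654486 through task-2654489) all follow the same shape: submit a vCenter task, then poll its state until it reaches a terminal state, logging progress along the way. A minimal sketch of that loop, purely illustrative and not the oslo.vmware implementation; get_task_info and its return fields are hypothetical stand-ins:

# Simplified sketch of the poll-until-done loop behind the repeated
# "Waiting for the task" / "progress is N%" entries above. Illustration of
# the pattern only, not oslo.vmware code; get_task_info() is a hypothetical
# callable returning e.g. {'state': 'running', 'progress': 33}.
import time


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a task until it reaches a terminal state and return its result."""
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        # Still queued/running: report progress and retry after a short sleep.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)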
[ 845.286451] env[62974]: DEBUG oslo_vmware.api [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282913} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.286636] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.287761] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 845.287761] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 845.287761] env[62974]: INFO nova.compute.manager [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Took 1.19 seconds to destroy the instance on the hypervisor. [ 845.287761] env[62974]: DEBUG oslo.service.loopingcall [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.287761] env[62974]: DEBUG nova.compute.manager [-] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 845.287761] env[62974]: DEBUG nova.network.neutron [-] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 845.413986] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654487, 'name': Destroy_Task} progress is 33%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.429057] env[62974]: DEBUG nova.objects.instance [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lazy-loading 'numa_topology' on Instance uuid 366b5816-a847-48d1-ad03-5758e473a9d0 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.453025] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9e476953-e1ad-4072-8f98-1196b78d42d3 tempest-ServerGroupTestJSON-1398951570 tempest-ServerGroupTestJSON-1398951570-project-member] Lock "6c7401b6-a69f-4de3-aeb9-26c727d57b76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.074s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.531214] env[62974]: DEBUG nova.network.neutron [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updating instance_info_cache with network_info: [{"id": "77480bd6-dce2-44cc-9b9b-3987573c454f", "address": "fa:16:3e:8f:dc:8c", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77480bd6-dc", "ovs_interfaceid": "77480bd6-dce2-44cc-9b9b-3987573c454f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.563478] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654483, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.664513] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654488, 'name': CreateVM_Task, 'duration_secs': 0.436927} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.668026] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.668026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.668026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.668026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.668026] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d3d2e58-bce3-440d-bf82-e202f45c6d05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.671665] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 845.671665] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cea6a1-acef-2a9d-86b3-c30cb0b62cb1" [ 845.671665] env[62974]: _type = "Task" [ 845.671665] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.680761] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cea6a1-acef-2a9d-86b3-c30cb0b62cb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.718846] env[62974]: DEBUG nova.compute.manager [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Received event network-changed-1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 845.719279] env[62974]: DEBUG nova.compute.manager [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Refreshing instance network info cache due to event network-changed-1461ee04-30d1-4afa-b41b-26e9ea0dc08f. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 845.719974] env[62974]: DEBUG oslo_concurrency.lockutils [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] Acquiring lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.720259] env[62974]: DEBUG oslo_concurrency.lockutils [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] Acquired lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.720539] env[62974]: DEBUG nova.network.neutron [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Refreshing network info cache for port 1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.728294] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654489, 'name': Rename_Task, 'duration_secs': 0.208992} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.733060] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.733060] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bfb4cfc-fc3c-4c9d-9020-2e8bbb1b4958 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.741433] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 845.741433] env[62974]: value = "task-2654490" [ 845.741433] env[62974]: _type = "Task" [ 845.741433] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.754461] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654490, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.773682] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83189aef-5de2-4501-9ef9-5d9ee72d2127 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.827s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.904367] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654487, 'name': Destroy_Task, 'duration_secs': 0.676736} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.904506] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Destroyed the VM [ 845.908021] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 845.908021] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3a0451bc-8467-4f46-b799-e82345ad4234 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.912787] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 845.912787] env[62974]: value = "task-2654491" [ 845.912787] env[62974]: _type = "Task" [ 845.912787] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.922741] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654491, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.929502] env[62974]: DEBUG nova.objects.base [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Object Instance<366b5816-a847-48d1-ad03-5758e473a9d0> lazy-loaded attributes: resources,numa_topology {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 846.033830] env[62974]: DEBUG oslo_concurrency.lockutils [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Releasing lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.070838] env[62974]: DEBUG oslo_vmware.api [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654483, 'name': RemoveSnapshot_Task, 'duration_secs': 1.339952} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.071304] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 846.184838] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cea6a1-acef-2a9d-86b3-c30cb0b62cb1, 'name': SearchDatastore_Task, 'duration_secs': 0.04365} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.187962] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.188548] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.188548] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.188838] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.189958] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.189958] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bd7147b-dbfb-4ade-973f-f273db15b02f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.207777] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.207980] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.209105] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deb683e8-5d58-452f-9b8e-0cf0faefdb24 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.219937] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 846.219937] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296b05f-283f-cd25-2e6c-e775c7961592" [ 846.219937] env[62974]: _type = "Task" [ 846.219937] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.233163] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296b05f-283f-cd25-2e6c-e775c7961592, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.234059] env[62974]: DEBUG nova.network.neutron [-] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.256740] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654490, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.430532] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654491, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.458104] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.458444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.476312] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c02316-3d57-4530-897a-cd45c4ae5568 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.486298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a435e9cd-06f7-4142-94f9-bbcaef2bae84 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.525307] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba46b56c-6d9f-48fd-8329-a4a3814b1ec0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.537111] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1844ac9-9d1a-4947-8760-125c0b99b247 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.543903] env[62974]: DEBUG nova.compute.manager [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 846.544734] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d82047-0be2-46ed-a15f-6b4888f22016 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.556220] env[62974]: DEBUG nova.compute.provider_tree [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.579177] env[62974]: DEBUG nova.network.neutron [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updated VIF entry in instance network info cache for port 1461ee04-30d1-4afa-b41b-26e9ea0dc08f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 846.579546] env[62974]: DEBUG nova.network.neutron [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updating instance_info_cache with network_info: [{"id": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "address": "fa:16:3e:90:ba:22", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1461ee04-30", "ovs_interfaceid": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.584026] env[62974]: WARNING nova.compute.manager [None req-93590e82-54cb-4e93-8448-8ed9b0c4f2b9 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Image not found during snapshot: nova.exception.ImageNotFound: Image 365438d4-9deb-4702-9499-47d85b80e3d3 could not be found. [ 846.735531] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296b05f-283f-cd25-2e6c-e775c7961592, 'name': SearchDatastore_Task, 'duration_secs': 0.035637} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.737553] env[62974]: INFO nova.compute.manager [-] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Took 1.45 seconds to deallocate network for instance. [ 846.737553] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1e9333b-51f8-4893-86fd-c416bf266c2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.757894] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 846.757894] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fee618-429b-17f2-c30a-0ede7d42542e" [ 846.757894] env[62974]: _type = "Task" [ 846.757894] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.767731] env[62974]: DEBUG oslo_vmware.api [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654490, 'name': PowerOnVM_Task, 'duration_secs': 0.544498} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.768858] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.769183] env[62974]: INFO nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Took 7.98 seconds to spawn the instance on the hypervisor. [ 846.769435] env[62974]: DEBUG nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 846.771540] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9419327a-a0b3-407c-ad5e-c0bb35606a57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.781618] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fee618-429b-17f2-c30a-0ede7d42542e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.924238] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654491, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.966493] env[62974]: DEBUG nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.061483] env[62974]: DEBUG nova.scheduler.client.report [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.085214] env[62974]: DEBUG oslo_concurrency.lockutils [req-aea325f5-c14d-496d-a0eb-515c0f5f67d0 req-46da46c6-e3f8-4f86-8fd8-ebae9848cbb5 service nova] Releasing lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.254135] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.272023] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fee618-429b-17f2-c30a-0ede7d42542e, 'name': SearchDatastore_Task, 'duration_secs': 0.027247} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.272023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.272023] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.272023] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f63f86f3-045d-4e58-b305-8b294b422f43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.282915] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 847.282915] env[62974]: value = "task-2654492" [ 847.282915] env[62974]: _type = "Task" [ 847.282915] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.291274] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.303735] env[62974]: INFO nova.compute.manager [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Took 38.13 seconds to build instance. [ 847.432714] env[62974]: DEBUG oslo_vmware.api [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654491, 'name': RemoveSnapshot_Task, 'duration_secs': 1.236769} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.433074] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 847.433326] env[62974]: INFO nova.compute.manager [None req-2e1927e0-c47e-4f21-ace8-294461c5b434 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Took 17.93 seconds to snapshot the instance on the hypervisor. 
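The lockutils entries above record two durations per lock: how long the caller waited to acquire it and how long it was held before release (for example "compute_resources" acquired after waiting 22.817s, released after being held 2.152s). A minimal sketch of that bookkeeping, assuming the standard oslo.concurrency lock() context manager; the timing and log format here are illustrative, not the library's internal code:

# Sketch of the "acquired :: waited Xs" / "released :: held Ys" accounting
# seen in the lockutils lines above. Assumes oslo.concurrency's lock()
# context manager; the wrapper and its print() logging are illustrative.
import time

from oslo_concurrency import lockutils


def with_timed_lock(name, critical_section):
    requested = time.monotonic()
    with lockutils.lock(name):
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired :: waited {acquired - requested:.3f}s')
        try:
            return critical_section()
        finally:
            held = time.monotonic() - acquired
            print(f'Lock "{name}" released :: held {held:.3f}s')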
[ 847.489966] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.534162] env[62974]: DEBUG nova.compute.manager [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Received event network-changed-5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 847.534162] env[62974]: DEBUG nova.compute.manager [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Refreshing instance network info cache due to event network-changed-5ef50dc0-edb6-41e4-b27b-22e996c326b4. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 847.535025] env[62974]: DEBUG oslo_concurrency.lockutils [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] Acquiring lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.535479] env[62974]: DEBUG oslo_concurrency.lockutils [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] Acquired lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.535820] env[62974]: DEBUG nova.network.neutron [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Refreshing network info cache for port 5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.568299] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.649s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.574160] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.486s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.578021] env[62974]: DEBUG nova.objects.instance [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 847.579403] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-26a4501f-5ca8-41cf-9e0c-2d7864b7dc6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.589781] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Doing hard reboot of VM {{(pid=62974) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 847.590038] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b5291cef-b34a-4781-9c7e-efc6fbc0d273 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.598973] env[62974]: DEBUG oslo_vmware.api [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 847.598973] env[62974]: value = "task-2654493" [ 847.598973] env[62974]: _type = "Task" [ 847.598973] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.612804] env[62974]: DEBUG oslo_vmware.api [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654493, 'name': ResetVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.796218] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654492, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.807146] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5903aa1a-6181-4ef4-8fea-75c68a821028 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.279s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.830543] env[62974]: DEBUG nova.compute.manager [req-473d0af8-3cf9-4cf3-9167-9d6507b4bd42 req-07d51159-a886-4e97-b75c-756d85ec5abb service nova] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Received event network-vif-deleted-5e6afe42-2743-40f8-8491-2b441697f6aa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 848.080446] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "e42547b0-25b7-4a34-b832-b93103065928" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.081212] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "e42547b0-25b7-4a34-b832-b93103065928" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.081212] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "e42547b0-25b7-4a34-b832-b93103065928-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.082148] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "e42547b0-25b7-4a34-b832-b93103065928-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.082344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "e42547b0-25b7-4a34-b832-b93103065928-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.095222] env[62974]: DEBUG oslo_concurrency.lockutils [None req-83311959-3d08-43e2-ac5c-6fd26db054b7 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock 
"366b5816-a847-48d1-ad03-5758e473a9d0" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 50.788s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.097609] env[62974]: INFO nova.compute.manager [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Terminating instance [ 848.100820] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 23.501s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.101105] env[62974]: INFO nova.compute.manager [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Unshelving [ 848.121946] env[62974]: DEBUG oslo_vmware.api [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654493, 'name': ResetVM_Task, 'duration_secs': 0.1015} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.122664] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Did hard reboot of VM {{(pid=62974) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 848.122664] env[62974]: DEBUG nova.compute.manager [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.123761] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3eb008-94aa-4134-8100-a69941e83c54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.293940] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.81281} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.294221] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.294433] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.294869] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2251f2d-df5f-4219-a836-c24a78be9ce3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.300241] env[62974]: DEBUG nova.network.neutron [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updated VIF entry in instance network info cache for port 5ef50dc0-edb6-41e4-b27b-22e996c326b4. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.300581] env[62974]: DEBUG nova.network.neutron [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating instance_info_cache with network_info: [{"id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "address": "fa:16:3e:9f:c5:d4", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ef50dc0-ed", "ovs_interfaceid": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.305285] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 848.305285] env[62974]: value = "task-2654494" [ 
848.305285] env[62974]: _type = "Task" [ 848.305285] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.318069] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654494, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.601610] env[62974]: DEBUG oslo_concurrency.lockutils [None req-764ed8ee-1ae1-45b2-826f-562d242634ce tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.028s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.602725] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.159s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.604164] env[62974]: INFO nova.compute.claims [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.614202] env[62974]: DEBUG nova.compute.manager [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 848.614202] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.615452] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bcbd75-ccaf-417b-8162-4fd49cf61dd7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.640027] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.640555] env[62974]: DEBUG oslo_concurrency.lockutils [None req-07c63635-3685-4fd9-914c-40104e5ae628 tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.504s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.641430] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfc94360-7cf0-4cee-9c65-261dce6b431e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.649474] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 848.649474] env[62974]: value = "task-2654495" [ 848.649474] env[62974]: _type = "Task" [ 848.649474] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.660879] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.805209] env[62974]: DEBUG oslo_concurrency.lockutils [req-14c5cf4d-8707-4953-ad0d-39aa82ae2e79 req-deb67d40-a7f5-434d-a90b-2eb0620430b0 service nova] Releasing lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.815651] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654494, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070927} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.815935] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.816987] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ca96fd-84ee-427c-8be5-fd3f9eebf6fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.841182] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.841498] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a3c14b2-85cb-4f51-a5c8-0520f0a3996d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.863376] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 848.863376] env[62974]: value = "task-2654496" [ 848.863376] env[62974]: _type = "Task" [ 848.863376] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.873493] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654496, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.133206] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.162623] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.375532] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654496, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.618203] env[62974]: DEBUG nova.compute.manager [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Received event network-changed-77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 849.618470] env[62974]: DEBUG nova.compute.manager [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Refreshing instance network info cache due to event network-changed-77480bd6-dce2-44cc-9b9b-3987573c454f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 849.618843] env[62974]: DEBUG oslo_concurrency.lockutils [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] Acquiring lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.619105] env[62974]: DEBUG oslo_concurrency.lockutils [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] Acquired lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.619332] env[62974]: DEBUG nova.network.neutron [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Refreshing network info cache for port 77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.663724] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.858535] env[62974]: DEBUG nova.compute.manager [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Received event network-changed-9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 849.858745] env[62974]: DEBUG nova.compute.manager [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Refreshing instance network info cache due to event network-changed-9a104751-f775-4505-a6de-a82f22b2127c. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 849.858977] env[62974]: DEBUG oslo_concurrency.lockutils [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.859135] env[62974]: DEBUG oslo_concurrency.lockutils [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.859297] env[62974]: DEBUG nova.network.neutron [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Refreshing network info cache for port 9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.882510] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654496, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.038601] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f72642-94bb-485d-b935-f09e08486e4a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.046141] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ad4a7f-3e3b-4333-944c-fd08ca0d3dff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.077431] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e8a2ae-c529-4a2b-922d-ca246d806177 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.085620] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3a299a-72e1-466a-b27c-575315099ac0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.089545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.089775] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.089985] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "6243cce3-8611-46fa-8379-e2f3c825c4dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.090396] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.090396] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.092464] env[62974]: INFO nova.compute.manager [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Terminating instance [ 850.102778] env[62974]: DEBUG nova.compute.provider_tree [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.104831] env[62974]: DEBUG nova.compute.manager [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 850.105042] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.106068] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58428a64-885d-4d0d-beb7-3c291bf7516a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.114069] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.114297] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb1407ee-a233-4a18-be66-a2692fe64df4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.121180] env[62974]: DEBUG oslo_vmware.api [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 850.121180] env[62974]: value = "task-2654497" [ 850.121180] env[62974]: _type = "Task" [ 850.121180] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.132764] env[62974]: DEBUG oslo_vmware.api [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654497, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.163339] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.337320] env[62974]: DEBUG nova.network.neutron [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updated VIF entry in instance network info cache for port 77480bd6-dce2-44cc-9b9b-3987573c454f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.337685] env[62974]: DEBUG nova.network.neutron [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updating instance_info_cache with network_info: [{"id": "77480bd6-dce2-44cc-9b9b-3987573c454f", "address": "fa:16:3e:8f:dc:8c", "network": {"id": "35ffa8c1-6f01-493b-b8e7-83bc4038894b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1229634623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0575ed5c28314e939bf91ea58759bf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77480bd6-dc", "ovs_interfaceid": "77480bd6-dce2-44cc-9b9b-3987573c454f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.376897] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654496, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.610042] env[62974]: DEBUG nova.scheduler.client.report [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.614360] env[62974]: DEBUG nova.network.neutron [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updated VIF entry in instance network info cache for port 9a104751-f775-4505-a6de-a82f22b2127c. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.614360] env[62974]: DEBUG nova.network.neutron [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.635767] env[62974]: DEBUG oslo_vmware.api [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654497, 'name': PowerOffVM_Task, 'duration_secs': 0.218417} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.636100] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.636321] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.636614] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da70fcb3-8be9-42e9-87a5-1ebb7792706a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.662573] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.712951] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.713209] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.713488] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Deleting the datastore file [datastore1] 6243cce3-8611-46fa-8379-e2f3c825c4dd {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.713699] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5879fae2-9231-4dfb-be50-85373b62c07b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.721911] env[62974]: DEBUG oslo_vmware.api [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 850.721911] env[62974]: value = "task-2654499" [ 850.721911] env[62974]: _type = "Task" [ 850.721911] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.731247] env[62974]: DEBUG oslo_vmware.api [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654499, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.841668] env[62974]: DEBUG oslo_concurrency.lockutils [req-d7ffab47-228b-4c11-bd41-f8d540c1587a req-b0b1c862-f7b3-4f6c-b5cd-1301f159d88a service nova] Releasing lock "refresh_cache-6243cce3-8611-46fa-8379-e2f3c825c4dd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.877324] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654496, 'name': ReconfigVM_Task, 'duration_secs': 1.627083} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.877681] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.878429] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89df24e5-28a7-4e13-81b0-23ed64e1e046 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.885787] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 850.885787] env[62974]: value = "task-2654500" [ 850.885787] env[62974]: _type = "Task" [ 850.885787] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.895346] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654500, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.117359] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.118140] env[62974]: DEBUG nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 851.120837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.446s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.121076] env[62974]: DEBUG nova.objects.instance [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lazy-loading 'resources' on Instance uuid 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.122459] env[62974]: DEBUG oslo_concurrency.lockutils [req-565e5532-8f3c-428f-aa4e-1c1b5488c854 req-310168e0-b381-4071-bd97-9d735971ff38 service nova] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.164890] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.233564] env[62974]: DEBUG oslo_vmware.api [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234666} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.233819] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.234009] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.234199] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.234366] env[62974]: INFO nova.compute.manager [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 851.234624] env[62974]: DEBUG oslo.service.loopingcall [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.234820] env[62974]: DEBUG nova.compute.manager [-] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 851.235650] env[62974]: DEBUG nova.network.neutron [-] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.397473] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654500, 'name': Rename_Task, 'duration_secs': 0.153928} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.397932] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.398038] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba985bb3-1e3e-4705-9287-9f589d6f39f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.407023] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 851.407023] env[62974]: value = "task-2654501" [ 851.407023] env[62974]: _type = "Task" [ 851.407023] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.416256] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.625360] env[62974]: DEBUG nova.compute.utils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 851.630106] env[62974]: DEBUG nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 851.630316] env[62974]: DEBUG nova.network.neutron [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 851.666775] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.674394] env[62974]: DEBUG nova.policy [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a86bbc98ec50467792b3c6a6cedc790b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14dd4a9a77ad40458d40bb82ac4b90a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 851.898123] env[62974]: DEBUG nova.compute.manager [req-99e348d1-bfa1-4401-8119-6ee169256c70 req-052620d5-64aa-4c5f-a746-cd0d35003848 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Received event network-vif-deleted-77480bd6-dce2-44cc-9b9b-3987573c454f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 851.899347] env[62974]: INFO nova.compute.manager [req-99e348d1-bfa1-4401-8119-6ee169256c70 req-052620d5-64aa-4c5f-a746-cd0d35003848 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Neutron deleted interface 77480bd6-dce2-44cc-9b9b-3987573c454f; detaching it from the instance and deleting it from the info cache [ 851.900677] env[62974]: DEBUG nova.network.neutron [req-99e348d1-bfa1-4401-8119-6ee169256c70 req-052620d5-64aa-4c5f-a746-cd0d35003848 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.925281] env[62974]: DEBUG oslo_vmware.api [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654501, 'name': PowerOnVM_Task, 'duration_secs': 0.518923} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.928186] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.928415] env[62974]: INFO nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Took 9.35 seconds to spawn the instance on the hypervisor. [ 851.928593] env[62974]: DEBUG nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.929875] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2c6038-c47c-4e49-ad1d-72d8de46553c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.974682] env[62974]: DEBUG nova.network.neutron [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Successfully created port: 87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.991461] env[62974]: DEBUG nova.network.neutron [-] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.114131] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f11e6e5-7e7e-4364-95a3-ba56bef6abf2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.120457] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07aecd51-5811-41e8-9276-0a40f1fe6238 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.156764] env[62974]: DEBUG nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 852.164302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7712cd55-8031-4d63-bfb5-c9f7e58715bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.174764] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.180039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a71df0-2dc7-44e6-b722-4a30c5f77266 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.196128] env[62974]: DEBUG nova.compute.provider_tree [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.403996] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f83c2215-c985-4919-bac0-ee9e06bd0cc2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.415317] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6428793d-a1e6-4d3b-94b5-fed92029af3b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.464201] env[62974]: DEBUG nova.compute.manager [req-99e348d1-bfa1-4401-8119-6ee169256c70 req-052620d5-64aa-4c5f-a746-cd0d35003848 service nova] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Detach interface failed, port_id=77480bd6-dce2-44cc-9b9b-3987573c454f, reason: Instance 6243cce3-8611-46fa-8379-e2f3c825c4dd could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 852.466984] env[62974]: INFO nova.compute.manager [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Took 37.99 seconds to build instance. [ 852.493725] env[62974]: INFO nova.compute.manager [-] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Took 1.26 seconds to deallocate network for instance. [ 852.676377] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654495, 'name': PowerOffVM_Task, 'duration_secs': 3.625603} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.676692] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 852.676997] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 852.677285] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5ea5261-eb8b-4db7-b7e5-b11a6536e796 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.699866] env[62974]: DEBUG nova.scheduler.client.report [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.755905] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 852.756114] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 852.756299] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleting the datastore file [datastore1] e42547b0-25b7-4a34-b832-b93103065928 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.756583] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-206629d7-1de6-4ead-9598-5b57699c71b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.767021] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 
852.767021] env[62974]: value = "task-2654503" [ 852.767021] env[62974]: _type = "Task" [ 852.767021] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.775702] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654503, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.969687] env[62974]: DEBUG oslo_concurrency.lockutils [None req-45c1fbe4-2070-4ab2-baea-de392558a2bc tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.095s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.000723] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.170515] env[62974]: DEBUG nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 853.206077] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.085s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.210451] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 853.210451] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.210451] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 853.210688] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.210766] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 853.210908] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 853.212103] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 853.212103] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 853.212103] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 853.212103] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 853.212103] env[62974]: DEBUG nova.virt.hardware [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 853.212306] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.749s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.212502] env[62974]: DEBUG nova.objects.instance [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lazy-loading 'resources' on Instance uuid 6e8f07c2-60da-4bad-a7af-8c83294e232f {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.214286] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e4fb23-b3f3-4972-8744-857e0a1a60ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.224097] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1944d466-2772-4158-b631-e26c067d1ec7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.245959] env[62974]: INFO nova.scheduler.client.report [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Deleted allocations for instance 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7 [ 853.278424] env[62974]: DEBUG oslo_vmware.api [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141413} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.278684] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 853.278909] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 853.279104] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 853.279780] env[62974]: INFO nova.compute.manager [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: e42547b0-25b7-4a34-b832-b93103065928] Took 4.67 seconds to destroy the instance on the hypervisor. [ 853.279780] env[62974]: DEBUG oslo.service.loopingcall [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.279780] env[62974]: DEBUG nova.compute.manager [-] [instance: e42547b0-25b7-4a34-b832-b93103065928] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 853.279938] env[62974]: DEBUG nova.network.neutron [-] [instance: e42547b0-25b7-4a34-b832-b93103065928] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 853.447777] env[62974]: DEBUG nova.compute.manager [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Received event network-changed-1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.447976] env[62974]: DEBUG nova.compute.manager [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Refreshing instance network info cache due to event network-changed-1461ee04-30d1-4afa-b41b-26e9ea0dc08f. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 853.448564] env[62974]: DEBUG oslo_concurrency.lockutils [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] Acquiring lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.448749] env[62974]: DEBUG oslo_concurrency.lockutils [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] Acquired lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.448943] env[62974]: DEBUG nova.network.neutron [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Refreshing network info cache for port 1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.661985] env[62974]: DEBUG nova.network.neutron [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Successfully updated port: 87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.756373] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9e11983-cf15-4094-8dc2-455b841dd474 tempest-ListImageFiltersTestJSON-656357188 tempest-ListImageFiltersTestJSON-656357188-project-member] Lock "8bd478ab-a101-4d6a-9e7c-bfde0fce81c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.652s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.980048] env[62974]: DEBUG nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Received event network-vif-plugged-87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.980324] env[62974]: DEBUG oslo_concurrency.lockutils [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] Acquiring lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.980634] env[62974]: DEBUG oslo_concurrency.lockutils [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.980962] env[62974]: DEBUG oslo_concurrency.lockutils [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.981096] env[62974]: 
DEBUG nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] No waiting events found dispatching network-vif-plugged-87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 853.981327] env[62974]: WARNING nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Received unexpected event network-vif-plugged-87b5b3a5-74b0-4465-a533-043f1f583030 for instance with vm_state building and task_state spawning. [ 853.981528] env[62974]: DEBUG nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Received event network-changed-87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.981734] env[62974]: DEBUG nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Refreshing instance network info cache due to event network-changed-87b5b3a5-74b0-4465-a533-043f1f583030. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 853.982102] env[62974]: DEBUG oslo_concurrency.lockutils [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] Acquiring lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.982444] env[62974]: DEBUG oslo_concurrency.lockutils [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] Acquired lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.982570] env[62974]: DEBUG nova.network.neutron [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Refreshing network info cache for port 87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.164484] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.176100] env[62974]: DEBUG nova.network.neutron [-] [instance: e42547b0-25b7-4a34-b832-b93103065928] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.222513] env[62974]: DEBUG nova.network.neutron [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updated VIF entry in instance network info cache for port 1461ee04-30d1-4afa-b41b-26e9ea0dc08f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.222890] env[62974]: DEBUG nova.network.neutron [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updating instance_info_cache with network_info: [{"id": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "address": "fa:16:3e:90:ba:22", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1461ee04-30", "ovs_interfaceid": "1461ee04-30d1-4afa-b41b-26e9ea0dc08f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.238873] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c71aaac-1fb4-46be-ae6a-7c9c264c521c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.247882] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3da15c-e294-42bd-b814-9f12a01ab025 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.282439] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cebfee5-7603-461d-952f-a3f21f808d46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.291918] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac30463-de2b-4899-976a-d58ec02c7a54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.307090] env[62974]: DEBUG nova.compute.provider_tree [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.524224] env[62974]: DEBUG nova.network.neutron [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.606803] env[62974]: DEBUG nova.network.neutron [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.679982] env[62974]: INFO nova.compute.manager [-] [instance: e42547b0-25b7-4a34-b832-b93103065928] Took 1.40 seconds to deallocate network for instance. [ 854.726108] env[62974]: DEBUG oslo_concurrency.lockutils [req-935e59bb-21a6-41bf-96f1-5001bb757845 req-02d14779-aa9c-4f96-981a-b521d202ed68 service nova] Releasing lock "refresh_cache-3df97cea-5a6e-4d7a-b2f3-e02213816e24" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.810489] env[62974]: DEBUG nova.scheduler.client.report [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.111074] env[62974]: DEBUG oslo_concurrency.lockutils [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] Releasing lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.111788] env[62974]: DEBUG nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Received event network-vif-deleted-4c5397a0-f933-4f39-911d-525d8d7e5aac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 855.111788] env[62974]: INFO nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Neutron deleted interface 4c5397a0-f933-4f39-911d-525d8d7e5aac; detaching it from the instance and deleting it from the info cache [ 855.111994] env[62974]: DEBUG nova.network.neutron [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.112990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.113646] env[62974]: DEBUG nova.network.neutron [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 
tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.186472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.316034] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.103s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.317965] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.483s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.318262] env[62974]: DEBUG nova.objects.instance [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lazy-loading 'resources' on Instance uuid 0bc05477-1802-4f8b-8d23-2742f9baf603 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.338010] env[62974]: INFO nova.scheduler.client.report [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Deleted allocations for instance 6e8f07c2-60da-4bad-a7af-8c83294e232f [ 855.617084] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-613d2bc1-b1ae-4598-8937-3bbd754ce8a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.634254] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66952cef-6aff-4e2b-a640-07a8223d55a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.675644] env[62974]: DEBUG nova.compute.manager [req-728cd239-1fe8-4a86-8b4d-9ef06299c835 req-992bf5f9-3e87-4e50-bd96-52b0ac3b3266 service nova] [instance: e42547b0-25b7-4a34-b832-b93103065928] Detach interface failed, port_id=4c5397a0-f933-4f39-911d-525d8d7e5aac, reason: Instance e42547b0-25b7-4a34-b832-b93103065928 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 855.677033] env[62974]: DEBUG nova.network.neutron [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.798251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "59ece0e8-85c2-499d-aba2-fd45fc116013" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.798251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.854037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-42cca789-da30-4387-8ebb-b91c5bdd15f8 tempest-ServerShowV254Test-1145974182 tempest-ServerShowV254Test-1145974182-project-member] Lock "6e8f07c2-60da-4bad-a7af-8c83294e232f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.686s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.867125] env[62974]: DEBUG nova.network.neutron [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Updating instance_info_cache with network_info: [{"id": "87b5b3a5-74b0-4465-a533-043f1f583030", "address": "fa:16:3e:4e:16:55", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b5b3a5-74", "ovs_interfaceid": "87b5b3a5-74b0-4465-a533-043f1f583030", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.252370] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5211b408-d542-4a23-88ae-6f620a430f5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.260194] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6b82f5-d475-4267-90e1-cb691e399141 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.289802] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c105b630-c234-4be8-a15d-e8432cda8f8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.297049] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea675baf-e0a6-4d76-95cb-fa22871f49fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.301554] env[62974]: DEBUG nova.compute.utils [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 856.314205] env[62974]: DEBUG nova.compute.provider_tree [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.370779] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.370779] env[62974]: DEBUG nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Instance network_info: |[{"id": "87b5b3a5-74b0-4465-a533-043f1f583030", "address": "fa:16:3e:4e:16:55", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b5b3a5-74", "ovs_interfaceid": "87b5b3a5-74b0-4465-a533-043f1f583030", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 856.371173] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:4e:16:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87b5b3a5-74b0-4465-a533-043f1f583030', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.378674] env[62974]: DEBUG oslo.service.loopingcall [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.379025] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.379286] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8587b50-9b08-4e9a-b8dc-dbbc10c2ea6c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.404602] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.404602] env[62974]: value = "task-2654504" [ 856.404602] env[62974]: _type = "Task" [ 856.404602] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.414269] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654504, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.804536] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.818182] env[62974]: DEBUG nova.scheduler.client.report [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 856.914292] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654504, 'name': CreateVM_Task, 'duration_secs': 0.386767} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.914454] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 856.915155] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.915329] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.915666] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 856.915913] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d774149-3faf-4554-8208-40e4d54cbbd0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.920128] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 856.920128] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527b0d18-c6ce-99e0-656d-9403bd106fa6" [ 856.920128] env[62974]: _type = "Task" [ 856.920128] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.927399] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527b0d18-c6ce-99e0-656d-9403bd106fa6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.322958] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.325345] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.252s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.326598] env[62974]: DEBUG nova.objects.instance [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lazy-loading 'resources' on Instance uuid d8b7a39f-ec73-4a87-9b1e-9428ca72f895 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.348286] env[62974]: INFO nova.scheduler.client.report [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleted allocations for instance 0bc05477-1802-4f8b-8d23-2742f9baf603 [ 857.430625] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527b0d18-c6ce-99e0-656d-9403bd106fa6, 'name': SearchDatastore_Task, 'duration_secs': 0.009936} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.430996] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.431174] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.431423] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.431569] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.431746] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.432008] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5889aec5-feed-4d4d-82b4-6af9874ffa5f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.440489] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.440585] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.441316] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-581ca5b0-88fe-455b-9357-e7e5ff435b70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.446272] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 857.446272] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fda3a5-5776-69bb-e8f6-01b503c7dad3" [ 857.446272] env[62974]: _type = "Task" [ 857.446272] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.453359] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fda3a5-5776-69bb-e8f6-01b503c7dad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.855946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf46aab-c2d8-44be-8f88-aab160d2a90a tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "0bc05477-1802-4f8b-8d23-2742f9baf603" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.555s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.874367] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "59ece0e8-85c2-499d-aba2-fd45fc116013" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.874808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.875042] env[62974]: INFO nova.compute.manager [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Attaching volume 59c980c4-278c-43af-afdb-98f8cef8d2b8 to /dev/sdb [ 857.906532] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b23e63c-4a82-44ee-b602-ff35555370da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.915545] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cd11db-16d1-41c6-8ea5-cd98a5abb2f2 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.928857] env[62974]: DEBUG nova.virt.block_device [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Updating existing volume attachment record: 22b9fec1-e80c-44ea-8d2c-98712b50886b {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 857.961417] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fda3a5-5776-69bb-e8f6-01b503c7dad3, 'name': SearchDatastore_Task, 'duration_secs': 0.008471} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.962594] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14d5712b-c9f6-4ea3-ad29-701ed260dda6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.970409] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 857.970409] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ef01d-17f5-c400-9e10-91c2b2f7356b" [ 857.970409] env[62974]: _type = "Task" [ 857.970409] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.979319] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ef01d-17f5-c400-9e10-91c2b2f7356b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.206491] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fc85db-9a18-4e8d-8b56-9ff7c815fbeb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.214659] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204835e7-8c15-43fe-b27d-1464c450c36a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.245279] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4500b8-cde0-4380-a392-a88ce5c8d054 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.253255] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2ca6b0-4c5a-4293-962a-f62b23f025c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.267093] env[62974]: DEBUG nova.compute.provider_tree [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.481293] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ef01d-17f5-c400-9e10-91c2b2f7356b, 'name': SearchDatastore_Task, 'duration_secs': 0.010487} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.481582] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.481855] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7/6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.482144] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43652166-f974-4c06-b5a2-6ae5419e95b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.488438] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 858.488438] env[62974]: value = "task-2654508" [ 858.488438] env[62974]: _type = "Task" [ 858.488438] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.496035] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654508, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.770192] env[62974]: DEBUG nova.scheduler.client.report [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.935062] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.935062] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.935062] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.935353] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.935353] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.937579] env[62974]: INFO nova.compute.manager [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Terminating instance [ 858.998579] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b 
tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44723} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.998983] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7/6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 858.999222] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.999494] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c15e88de-1559-400b-94ee-587114fa6dfd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.005693] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 859.005693] env[62974]: value = "task-2654509" [ 859.005693] env[62974]: _type = "Task" [ 859.005693] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.012970] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654509, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.276297] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.951s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.280102] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.755s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.280379] env[62974]: DEBUG nova.objects.instance [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lazy-loading 'resources' on Instance uuid d6ce3f68-a757-48bc-abeb-49c3aacdf465 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.301883] env[62974]: INFO nova.scheduler.client.report [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Deleted allocations for instance d8b7a39f-ec73-4a87-9b1e-9428ca72f895 [ 859.442814] env[62974]: DEBUG nova.compute.manager [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 859.443066] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 859.444019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce12e7a5-d84b-4f8a-9c72-ca763d06c574 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.451560] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.451808] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93342e5e-4953-444e-9419-55f570f3f6a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.458666] env[62974]: DEBUG oslo_vmware.api [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 859.458666] env[62974]: value = "task-2654510" [ 859.458666] env[62974]: _type = "Task" [ 859.458666] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.467309] env[62974]: DEBUG oslo_vmware.api [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654510, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.515418] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069642} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.515745] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.516452] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8f755a-bcee-449a-85ab-8648718b2ffe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.539478] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7/6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.539661] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cf2f0b9-ea5d-4834-85e6-59f378aaa1d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.560906] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 859.560906] env[62974]: value = "task-2654511" [ 859.560906] env[62974]: _type = "Task" [ 859.560906] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.569302] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654511, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.809674] env[62974]: DEBUG oslo_concurrency.lockutils [None req-043f4b72-3040-4b17-87e9-98da3e327b68 tempest-MigrationsAdminTest-126169443 tempest-MigrationsAdminTest-126169443-project-member] Lock "d8b7a39f-ec73-4a87-9b1e-9428ca72f895" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.175s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.969703] env[62974]: DEBUG oslo_vmware.api [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654510, 'name': PowerOffVM_Task, 'duration_secs': 0.224792} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.970116] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.970383] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.970871] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8e14df5-5651-4b89-ade0-5f041b184041 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.038142] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 860.038395] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 860.038603] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleting the datastore file [datastore2] eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.038929] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01fbf2a3-1f30-4e65-a43f-b078d70a3e7f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.051256] env[62974]: DEBUG oslo_vmware.api [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 860.051256] env[62974]: value = "task-2654513" [ 860.051256] env[62974]: _type = "Task" [ 860.051256] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.059219] env[62974]: DEBUG oslo_vmware.api [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654513, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.070205] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654511, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.163221] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8543a668-2af8-4901-a4c5-571619811a41 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.170911] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a284c97d-f708-43bc-8c92-18904d5da596 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.201149] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58e3781-3abb-4f83-ac6a-6262dded5611 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.208337] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b338983-88f4-4e55-b58e-543a029d3e20 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.221390] env[62974]: DEBUG nova.compute.provider_tree [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.564329] env[62974]: DEBUG oslo_vmware.api [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654513, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232463} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.567429] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.567631] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 860.567809] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 860.567986] env[62974]: INFO nova.compute.manager [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Took 1.12 seconds to destroy the instance on the hypervisor. [ 860.568241] env[62974]: DEBUG oslo.service.loopingcall [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.568442] env[62974]: DEBUG nova.compute.manager [-] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 860.568536] env[62974]: DEBUG nova.network.neutron [-] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 860.575373] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654511, 'name': ReconfigVM_Task, 'duration_secs': 0.779515} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.575610] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7/6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.576189] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-248bd7cd-2a8a-40be-9bcc-a2f1d1c4beea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.582038] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 860.582038] env[62974]: value = "task-2654515" [ 860.582038] env[62974]: _type = "Task" [ 860.582038] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.589983] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654515, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.725105] env[62974]: DEBUG nova.scheduler.client.report [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.843968] env[62974]: DEBUG nova.compute.manager [req-9ae22c46-496c-4c63-9bc4-12dff9b6fe26 req-d307db82-71c1-443a-9603-2984cc819cd9 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Received event network-vif-deleted-9f1050c5-0ced-4039-b2a7-cea11ae0f227 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 860.844220] env[62974]: INFO nova.compute.manager [req-9ae22c46-496c-4c63-9bc4-12dff9b6fe26 req-d307db82-71c1-443a-9603-2984cc819cd9 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Neutron deleted interface 9f1050c5-0ced-4039-b2a7-cea11ae0f227; detaching it from the instance and deleting it from the info cache [ 860.844559] env[62974]: DEBUG nova.network.neutron [req-9ae22c46-496c-4c63-9bc4-12dff9b6fe26 req-d307db82-71c1-443a-9603-2984cc819cd9 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.094552] env[62974]: DEBUG oslo_vmware.api [None 
req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654515, 'name': Rename_Task, 'duration_secs': 0.384946} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.094552] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.094552] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de1da6c2-d369-4692-8b91-08a60447f22b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.103041] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 861.103041] env[62974]: value = "task-2654516" [ 861.103041] env[62974]: _type = "Task" [ 861.103041] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.112730] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.233529] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.953s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.236058] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.645s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.236305] env[62974]: DEBUG nova.objects.instance [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lazy-loading 'resources' on Instance uuid 1c7fabf7-ba82-4628-9016-b0f198add99a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 861.264844] env[62974]: INFO nova.scheduler.client.report [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleted allocations for instance d6ce3f68-a757-48bc-abeb-49c3aacdf465 [ 861.320761] env[62974]: DEBUG nova.network.neutron [-] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.347097] 
env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2a75b33-0d00-463d-8a73-89f481ae19a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.356893] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6b299d-bd8e-484c-a7a4-352870404c29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.396674] env[62974]: DEBUG nova.compute.manager [req-9ae22c46-496c-4c63-9bc4-12dff9b6fe26 req-d307db82-71c1-443a-9603-2984cc819cd9 service nova] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Detach interface failed, port_id=9f1050c5-0ced-4039-b2a7-cea11ae0f227, reason: Instance eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 861.613972] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654516, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.774844] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c0490b6c-2bcc-4879-ac8e-4de96258adb6 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "d6ce3f68-a757-48bc-abeb-49c3aacdf465" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.335s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.825271] env[62974]: INFO nova.compute.manager [-] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Took 1.26 seconds to deallocate network for instance. [ 862.121778] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654516, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.191654] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f38ad9-574f-4c5c-80c1-a8cf947f7e05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.202793] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c16553d-6b77-4088-ae6f-585d52d5062d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.236026] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53888385-41ac-4d1b-97b9-578ed787de9d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.245113] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8205bf3e-d209-443f-8c84-b68bdbe05e07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.259156] env[62974]: DEBUG nova.compute.provider_tree [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.333042] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.476690] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 862.476944] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535406', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'name': 'volume-59c980c4-278c-43af-afdb-98f8cef8d2b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '59ece0e8-85c2-499d-aba2-fd45fc116013', 'attached_at': '', 'detached_at': '', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'serial': '59c980c4-278c-43af-afdb-98f8cef8d2b8'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 862.477848] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c0e825-e008-4aa6-8d33-7ff141d69859 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.494677] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4f2bd4-19e0-4338-b850-4ca7eb2e7a10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.527352] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] volume-59c980c4-278c-43af-afdb-98f8cef8d2b8/volume-59c980c4-278c-43af-afdb-98f8cef8d2b8.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.528098] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56e986b7-bbc8-46ef-8328-bbb9ecf1061e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.546877] env[62974]: DEBUG oslo_vmware.api [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 862.546877] env[62974]: value = "task-2654517" [ 862.546877] env[62974]: _type = "Task" [ 862.546877] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.554599] env[62974]: DEBUG oslo_vmware.api [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654517, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.615062] env[62974]: DEBUG oslo_vmware.api [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654516, 'name': PowerOnVM_Task, 'duration_secs': 1.061237} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.615360] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.615583] env[62974]: INFO nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Took 9.44 seconds to spawn the instance on the hypervisor. [ 862.615935] env[62974]: DEBUG nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 862.616863] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656a5c62-01d4-4cc0-bcbf-9bf06c33f6d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.762741] env[62974]: DEBUG nova.scheduler.client.report [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.058879] env[62974]: DEBUG oslo_vmware.api [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654517, 'name': ReconfigVM_Task, 'duration_secs': 0.430799} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.058879] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Reconfigured VM instance instance-00000025 to attach disk [datastore1] volume-59c980c4-278c-43af-afdb-98f8cef8d2b8/volume-59c980c4-278c-43af-afdb-98f8cef8d2b8.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 863.061823] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2786fa3c-88db-41d5-b716-c467608000dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.077174] env[62974]: DEBUG oslo_vmware.api [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 863.077174] env[62974]: value = "task-2654518" [ 863.077174] env[62974]: _type = "Task" [ 863.077174] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.085872] env[62974]: DEBUG oslo_vmware.api [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654518, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.141132] env[62974]: INFO nova.compute.manager [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Took 36.72 seconds to build instance. [ 863.272818] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.276136] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 18.405s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.303936] env[62974]: INFO nova.scheduler.client.report [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Deleted allocations for instance 1c7fabf7-ba82-4628-9016-b0f198add99a [ 863.588202] env[62974]: DEBUG oslo_vmware.api [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654518, 'name': ReconfigVM_Task, 'duration_secs': 0.152231} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.588531] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535406', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'name': 'volume-59c980c4-278c-43af-afdb-98f8cef8d2b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '59ece0e8-85c2-499d-aba2-fd45fc116013', 'attached_at': '', 'detached_at': '', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'serial': '59c980c4-278c-43af-afdb-98f8cef8d2b8'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 863.645594] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1e52b2f6-d04a-40de-8627-8dc18d17154b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.239s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.734905] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "79448002-daa3-4afd-bd1b-36d734642a9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.735231] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "79448002-daa3-4afd-bd1b-36d734642a9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.816057] env[62974]: DEBUG oslo_concurrency.lockutils [None req-72f494d4-504e-4a98-a0fc-56be6f023957 tempest-ListServersNegativeTestJSON-71281647 tempest-ListServersNegativeTestJSON-71281647-project-member] Lock "1c7fabf7-ba82-4628-9016-b0f198add99a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.235s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.973770] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.974774] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock 
"6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.975055] env[62974]: INFO nova.compute.manager [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Shelving [ 864.230298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888e7bc0-9457-414a-8c6a-d76886848f85 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.239098] env[62974]: DEBUG nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 864.244194] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb84f481-c838-4433-ade8-5907583bb847 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.280137] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a15f1e-9cd6-462a-821b-e8aaba45bfcb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.289049] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc07d8f6-3a23-4ebc-8ba7-da6b62061a13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.304456] env[62974]: DEBUG nova.compute.provider_tree [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.648993] env[62974]: DEBUG nova.objects.instance [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lazy-loading 'flavor' on Instance uuid 59ece0e8-85c2-499d-aba2-fd45fc116013 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.771681] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.808197] env[62974]: DEBUG nova.scheduler.client.report [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.992767] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.993126] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c676e2c-745b-4748-b908-7ad5eeb6cd20 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.001444] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 865.001444] env[62974]: value = "task-2654519" [ 865.001444] env[62974]: _type = "Task" [ 865.001444] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.011816] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654519, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.155924] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e3d09564-7665-47fd-9941-db438ad79058 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.281s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.511583] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654519, 'name': PowerOffVM_Task, 'duration_secs': 0.420422} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.511860] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 865.513132] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09772d4c-4636-454f-9258-48c04cf7a642 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.536737] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8877d6-205c-4d24-a07f-0291ff61ceed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.668807] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "59ece0e8-85c2-499d-aba2-fd45fc116013" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.669138] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.818915] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.543s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.821851] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.571s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.822272] env[62974]: DEBUG nova.objects.instance [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lazy-loading 'resources' on Instance uuid da43a464-ebae-4038-9f7b-330df22d8d7c {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.049645] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 866.049645] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ba938aa1-ddbf-44c2-a980-95379519be3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.057159] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 866.057159] env[62974]: value = "task-2654520" [ 866.057159] env[62974]: _type = "Task" [ 866.057159] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.068515] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654520, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.172457] env[62974]: INFO nova.compute.manager [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Detaching volume 59c980c4-278c-43af-afdb-98f8cef8d2b8 [ 866.215767] env[62974]: INFO nova.virt.block_device [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Attempting to driver detach volume 59c980c4-278c-43af-afdb-98f8cef8d2b8 from mountpoint /dev/sdb [ 866.215994] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Volume detach. 
Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 866.219315] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535406', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'name': 'volume-59c980c4-278c-43af-afdb-98f8cef8d2b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '59ece0e8-85c2-499d-aba2-fd45fc116013', 'attached_at': '', 'detached_at': '', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'serial': '59c980c4-278c-43af-afdb-98f8cef8d2b8'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 866.219315] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7158dcab-6d9e-404d-9aea-b975d322a1f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.244692] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7a7cb1-a550-40ef-94c4-53a1c8b93bad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.253885] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a1d6b2-e1cd-4fc4-ab0e-09acd4b13de3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.276574] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc8f6ef-7043-42c5-9cf3-1464945592b6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.292438] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] The volume has not been displaced from its original location: [datastore1] volume-59c980c4-278c-43af-afdb-98f8cef8d2b8/volume-59c980c4-278c-43af-afdb-98f8cef8d2b8.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 866.298915] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Reconfiguring VM instance instance-00000025 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 866.299095] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-737e7b54-0963-470f-8eef-6c1a6d1ddd2e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.318675] env[62974]: DEBUG oslo_vmware.api [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 866.318675] env[62974]: value = "task-2654521" [ 866.318675] env[62974]: _type = "Task" [ 866.318675] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.333741] env[62974]: DEBUG oslo_vmware.api [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654521, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.420305] env[62974]: INFO nova.scheduler.client.report [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted allocation for migration 53944e14-f97c-4750-952b-d31a40fddfbe [ 866.572112] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654520, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.835291] env[62974]: DEBUG oslo_vmware.api [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654521, 'name': ReconfigVM_Task, 'duration_secs': 0.325015} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.835291] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Reconfigured VM instance instance-00000025 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 866.841258] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8121325a-948b-4fa7-9970-aa1aa25f5bb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.844162] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffb8bee0-cdc0-424c-9ec3-276e912bffb4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.874254] env[62974]: DEBUG oslo_vmware.api [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 866.874254] env[62974]: value = "task-2654522" [ 866.874254] env[62974]: _type = "Task" [ 866.874254] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.875271] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fa9956-a7d4-491b-b0ec-ea1bca033ff9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.887808] env[62974]: DEBUG oslo_vmware.api [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654522, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.914485] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7b6f4b-472b-4eed-b36d-1cffebbca9f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.922697] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c89600d-2d48-43f3-ae24-c85c9291a51e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.939023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-065e5e29-75a5-4d49-9bfa-2109d5ce2261 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 25.647s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.939933] env[62974]: DEBUG nova.compute.provider_tree [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.072225] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654520, 'name': CreateSnapshot_Task, 'duration_secs': 1.010773} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.072225] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 867.072225] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbf324e-2ee7-4f3c-8535-327a41e3014e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.389712] env[62974]: DEBUG oslo_vmware.api [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654522, 'name': ReconfigVM_Task, 'duration_secs': 0.22299} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.390160] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535406', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'name': 'volume-59c980c4-278c-43af-afdb-98f8cef8d2b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '59ece0e8-85c2-499d-aba2-fd45fc116013', 'attached_at': '', 'detached_at': '', 'volume_id': '59c980c4-278c-43af-afdb-98f8cef8d2b8', 'serial': '59c980c4-278c-43af-afdb-98f8cef8d2b8'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 867.443527] env[62974]: DEBUG nova.scheduler.client.report [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.600112] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 867.600112] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1532b09d-8a3b-4a8f-a3e4-a90e7b4f9dc9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.612677] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 867.612677] env[62974]: value = "task-2654523" [ 867.612677] env[62974]: _type = "Task" [ 867.612677] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.626359] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654523, 'name': CloneVM_Task} progress is 0%. 
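Annotation (not part of the captured log): the detach traced above (ReconfigVM_Task task-2654522 followed by the "Detached VMDK" record) is a plain vSphere reconfigure whose spec carries one device change with operation 'remove'. The sketch below is illustrative only, not the Nova driver code that produced these lines; it assumes an already-created oslo_vmware VMwareAPISession ("session"), the VM's managed-object reference ("vm_ref"), and a caller-supplied "disk_device" (the VirtualDisk to drop).

    def detach_volume_vmdk(session, vm_ref, disk_device):
        """Remove one virtual disk from a VM via ReconfigVM_Task (sketch)."""
        client_factory = session.vim.client.factory

        # Single device change, operation 'remove'.  fileOperation is left
        # unset so the backing VMDK (the Cinder volume) stays on the datastore.
        device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
        device_change.operation = 'remove'
        device_change.device = disk_device

        config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [device_change]

        # ReconfigVM_Task returns a Task moref; wait_for_task() is the polling
        # loop behind the "_poll_task ... progress is N%" lines in this log.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        session.wait_for_task(task)

wait_for_task() raises on a task error, which is why the log only ever shows either a progress percentage or "completed successfully" for each task id.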
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.948826] env[62974]: DEBUG nova.objects.instance [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lazy-loading 'flavor' on Instance uuid 59ece0e8-85c2-499d-aba2-fd45fc116013 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.953344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.129s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.957878] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.466s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.958935] env[62974]: INFO nova.compute.claims [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.998212] env[62974]: INFO nova.scheduler.client.report [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted allocations for instance da43a464-ebae-4038-9f7b-330df22d8d7c [ 868.126311] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654523, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.377335] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.377629] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.377863] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.378104] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.378305] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.381918] env[62974]: INFO nova.compute.manager [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Terminating instance [ 868.510911] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e6d71734-8857-4773-836e-40bd686fa10f tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "da43a464-ebae-4038-9f7b-330df22d8d7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.925s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.624686] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654523, 'name': CloneVM_Task} progress is 94%. 
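Annotation (not part of the captured log): the many 'Acquiring lock ... / Lock ... "released"' records, such as the terminate_instance and instance-events locks just above, are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two usual forms follows; the lock names are borrowed from this log only as examples and the function bodies are placeholders.

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the named lock held; the decorator's wrapper is what logs
        # the "Acquiring lock", "acquired ... waited" and "released ... held"
        # messages seen throughout this log (lockutils.py:402/407/421).
        pass


    # Ad-hoc critical sections use the context-manager form instead, which is
    # where the "refresh_cache-..." lock lines (lockutils.py:310/313) come from.
    with lockutils.lock('refresh_cache-70adaccf-44ab-44b1-ac8a-005d42c09f0a'):
        pass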
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.886834] env[62974]: DEBUG nova.compute.manager [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 868.886834] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.887390] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dba0b5a-d117-47fd-a03c-15c55ec9ae07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.898034] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.898034] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5af770ab-f3e8-485b-957e-493fea118586 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.903573] env[62974]: DEBUG oslo_vmware.api [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 868.903573] env[62974]: value = "task-2654524" [ 868.903573] env[62974]: _type = "Task" [ 868.903573] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.912550] env[62974]: DEBUG oslo_vmware.api [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654524, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.971754] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b21a265-c839-4d09-949c-ebd99c271006 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.303s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.011665] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "e11408df-466c-4101-b0cc-3621cda78a45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.014305] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.133660] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654523, 'name': CloneVM_Task, 'duration_secs': 1.480244} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.133660] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Created linked-clone VM from snapshot [ 869.133660] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2a6e2c-8e1e-4596-9273-22259f51e99b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.143987] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Uploading image 51282f4d-262b-45c5-b475-115919afa115 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 869.176267] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 869.176267] env[62974]: value = "vm-535408" [ 869.176267] env[62974]: _type = "VirtualMachine" [ 869.176267] env[62974]: }. 
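Annotation (not part of the captured log): the snapshot/linked-clone pair traced above for instance 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7 (CreateSnapshot_Task, then CloneVM_Task at task-2654523) maps onto two straightforward vSphere calls. The following is a sketch under the same assumptions as the earlier one (an oslo_vmware session and a VM moref); folder selection, spec defaults, and the snapshot name are simplified placeholders, not values taken from this run.

    from oslo_vmware import vim_util


    def linked_clone_from_snapshot(session, vm_ref, clone_name, ds_ref):
        """Snapshot a VM, then clone it with child-disk backings (sketch)."""
        client_factory = session.vim.client.factory

        # 1. Take the snapshot; its moref comes back as task_info.result.
        snap_task = session.invoke_api(session.vim, 'CreateSnapshot_Task',
                                       vm_ref, name='upload-snapshot',
                                       description='', memory=False,
                                       quiesce=False)
        snapshot_ref = session.wait_for_task(snap_task).result

        # 2. Clone from that snapshot; 'createNewChildDiskBacking' is what
        #    makes the clone linked rather than a full copy.
        relocate_spec = client_factory.create('ns0:VirtualMachineRelocateSpec')
        relocate_spec.datastore = ds_ref
        relocate_spec.diskMoveType = 'createNewChildDiskBacking'

        clone_spec = client_factory.create('ns0:VirtualMachineCloneSpec')
        clone_spec.location = relocate_spec
        clone_spec.snapshot = snapshot_ref
        clone_spec.powerOn = False
        clone_spec.template = False

        # Reuse the source VM's parent folder as the clone target.
        folder_ref = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, vm_ref, 'parent')
        clone_task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                                        folder=folder_ref, name=clone_name,
                                        spec=clone_spec)
        return session.wait_for_task(clone_task).result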
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 869.176802] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2e68814f-528f-414b-94e7-f407c044a047 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.184816] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lease: (returnval){ [ 869.184816] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b63709-5cc6-13d4-3141-5afe36b8c0af" [ 869.184816] env[62974]: _type = "HttpNfcLease" [ 869.184816] env[62974]: } obtained for exporting VM: (result){ [ 869.184816] env[62974]: value = "vm-535408" [ 869.184816] env[62974]: _type = "VirtualMachine" [ 869.184816] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 869.184816] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the lease: (returnval){ [ 869.184816] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b63709-5cc6-13d4-3141-5afe36b8c0af" [ 869.184816] env[62974]: _type = "HttpNfcLease" [ 869.184816] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 869.192031] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.192031] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b63709-5cc6-13d4-3141-5afe36b8c0af" [ 869.192031] env[62974]: _type = "HttpNfcLease" [ 869.192031] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 869.417309] env[62974]: DEBUG oslo_vmware.api [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654524, 'name': PowerOffVM_Task, 'duration_secs': 0.190015} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.420109] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.420109] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.420109] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9113f374-eaa7-4725-90af-48b386966072 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.447323] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.447636] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.447841] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.448127] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.448415] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.451582] env[62974]: INFO nova.compute.manager [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 
tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Terminating instance [ 869.470866] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd14a35d-d49c-428f-9a19-1a784177ace6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.482355] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b775b9c0-2710-492a-87cf-279294f25c62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.490407] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.490677] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.490865] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleting the datastore file [datastore2] af370de1-e4d7-4312-bc72-c6398eeaf2ed {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.519221] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7096c45-9060-4d97-8316-d5f99fb04860 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.521393] env[62974]: DEBUG nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 869.526986] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7f6128-3d76-4704-8d38-bc2d43ba8b07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.539498] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0da5343-24ca-474b-928a-6943aad38343 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.544869] env[62974]: DEBUG oslo_vmware.api [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 869.544869] env[62974]: value = "task-2654527" [ 869.544869] env[62974]: _type = "Task" [ 869.544869] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.558404] env[62974]: DEBUG nova.compute.provider_tree [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.564476] env[62974]: DEBUG oslo_vmware.api [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.585444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.585444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.692731] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.692731] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b63709-5cc6-13d4-3141-5afe36b8c0af" [ 869.692731] env[62974]: _type = "HttpNfcLease" [ 869.692731] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 869.693120] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 869.693120] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b63709-5cc6-13d4-3141-5afe36b8c0af" [ 869.693120] env[62974]: _type = "HttpNfcLease" [ 869.693120] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 869.693874] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ec3534-24d9-48f1-a803-51b8f36c952f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.701483] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520fef9c-aff4-7514-5cbb-49559cde2b19/disk-0.vmdk from lease info. 
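Annotation (not part of the captured log): the export above (ExportVm, lease "initializing" then "ready", then the disk-0.vmdk URL) is the standard HttpNfcLease sequence. A compact sketch, again illustrative only and assuming an oslo_vmware session:

    from oslo_vmware import vim_util


    def export_vmdk_url(session, vm_ref):
        """Return the HTTPS URL of an exported VM's disk (sketch)."""
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

        # Polls HttpNfcLease.state until it flips to 'ready' (the
        # "_poll_lease ... is initializing / is ready" lines above).
        session.wait_for_lease_ready(lease)

        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.disk:          # the VMDK entry, e.g. disk-0.vmdk
                return device_url.url
        return None

A real consumer also has to keep the lease alive with HttpNfcLeaseProgress while streaming and eventually complete it, which is what the HttpNfcLeaseProgress invocation a few records below is doing.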
{{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 869.701483] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520fef9c-aff4-7514-5cbb-49559cde2b19/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 869.804081] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-89215550-088c-4677-a8ce-c3e77e1acafb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.961261] env[62974]: DEBUG nova.compute.manager [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 869.961261] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.961792] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1749b2d5-4ea0-4625-a92b-0b9be4baa09c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.970062] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 869.970309] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c9211d9-49ed-457c-8efa-0dea5e25e436 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.976550] env[62974]: DEBUG oslo_vmware.api [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 869.976550] env[62974]: value = "task-2654528" [ 869.976550] env[62974]: _type = "Task" [ 869.976550] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.985976] env[62974]: DEBUG oslo_vmware.api [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.055452] env[62974]: DEBUG oslo_vmware.api [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158091} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.055518] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.055733] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 870.055998] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 870.056756] env[62974]: INFO nova.compute.manager [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Took 1.17 seconds to destroy the instance on the hypervisor. [ 870.056756] env[62974]: DEBUG oslo.service.loopingcall [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.058251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.059081] env[62974]: DEBUG nova.compute.manager [-] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 870.059081] env[62974]: DEBUG nova.network.neutron [-] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.061817] env[62974]: DEBUG nova.scheduler.client.report [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.088570] env[62974]: DEBUG nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 870.412371] env[62974]: DEBUG nova.compute.manager [req-b6d7b480-bd06-4e8b-bd4c-b41985f7d1eb req-dccf8d54-8805-4232-b394-c54524dc22a8 service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Received event network-vif-deleted-f8424609-cf9e-4474-a78b-3d28dbdd7cb0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 870.412522] env[62974]: INFO nova.compute.manager [req-b6d7b480-bd06-4e8b-bd4c-b41985f7d1eb req-dccf8d54-8805-4232-b394-c54524dc22a8 service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Neutron deleted interface f8424609-cf9e-4474-a78b-3d28dbdd7cb0; detaching it from the instance and deleting it from the info cache [ 870.412937] env[62974]: DEBUG nova.network.neutron [req-b6d7b480-bd06-4e8b-bd4c-b41985f7d1eb req-dccf8d54-8805-4232-b394-c54524dc22a8 service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.487926] env[62974]: DEBUG oslo_vmware.api [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654528, 'name': PowerOffVM_Task, 'duration_secs': 0.212315} completed successfully. 
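Annotation (not part of the captured log): to make the inventory record above for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 concrete, Placement's usable capacity per resource class is roughly (total - reserved) * allocation_ratio, with max_unit capping what a single allocation may consume (16 VCPU / 65530 MB / 120 GB here). Worked out for the reported numbers:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0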
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.488532] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.488532] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.488783] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ac653ca-78a4-4643-adb1-a34e37e74974 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.518218] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.518550] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.555312] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.555577] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.555818] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleting the datastore file [datastore2] 85f8f79d-330a-49cd-b1ae-8de20c70fcab {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.556152] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28096b4e-61ca-4dc0-9833-ce4089d250b3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.562707] env[62974]: DEBUG oslo_vmware.api [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 
tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 870.562707] env[62974]: value = "task-2654530" [ 870.562707] env[62974]: _type = "Task" [ 870.562707] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.567154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.567731] env[62974]: DEBUG nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 870.572009] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.438s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.572009] env[62974]: DEBUG nova.objects.instance [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lazy-loading 'pci_requests' on Instance uuid 366b5816-a847-48d1-ad03-5758e473a9d0 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.578206] env[62974]: DEBUG oslo_vmware.api [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654530, 'name': DeleteDatastoreFile_Task} progress is 0%. 
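Annotation (not part of the captured log): both instance teardowns in this stretch (af370de1-e4d7-4312-bc72-c6398eeaf2ed and 85f8f79d-330a-49cd-b1ae-8de20c70fcab) follow the same three vSphere calls: power off, unregister, then delete the instance directory from the datastore. A minimal sketch under the usual assumptions (existing oslo_vmware session; ds_path such as "[datastore2] instance-uuid" and dc_ref, the owning Datacenter moref, are caller-supplied):

    def destroy_backend_vm(session, vm_ref, ds_path, dc_ref):
        """Power off, unregister and remove a VM's files (sketch)."""
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM drops the VM from vCenter inventory but leaves its
        # files behind, hence the explicit DeleteDatastoreFile_Task after it.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)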
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.620226] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.892992] env[62974]: DEBUG nova.network.neutron [-] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.916026] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b008472c-5b08-4490-b48b-eee095d833a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.927609] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057d0ce3-16e5-45df-a9c6-a4a0148cc926 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.968028] env[62974]: DEBUG nova.compute.manager [req-b6d7b480-bd06-4e8b-bd4c-b41985f7d1eb req-dccf8d54-8805-4232-b394-c54524dc22a8 service nova] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Detach interface failed, port_id=f8424609-cf9e-4474-a78b-3d28dbdd7cb0, reason: Instance af370de1-e4d7-4312-bc72-c6398eeaf2ed could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 871.023538] env[62974]: DEBUG nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.075665] env[62974]: DEBUG nova.compute.utils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 871.078086] env[62974]: DEBUG oslo_vmware.api [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29538} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.079685] env[62974]: DEBUG nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 871.080274] env[62974]: DEBUG nova.network.neutron [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 871.084659] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.084659] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.084659] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.084659] env[62974]: INFO nova.compute.manager [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Took 1.12 seconds to destroy the instance on the hypervisor. [ 871.084659] env[62974]: DEBUG oslo.service.loopingcall [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
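Annotation (not part of the captured log): the "Waiting for function ..._deallocate_network_with_retries to return" records come from oslo.service's loopingcall module, which re-runs a callable until it signals completion. The exact retry primitive Nova uses for network deallocation is not reproduced here; the snippet below is only a generic FixedIntervalLoopingCall example showing the pattern, with a stub predicate.

    from oslo_service import loopingcall

    attempts = {'count': 0}


    def _try_once():
        attempts['count'] += 1
        if attempts['count'] >= 3:        # pretend the third attempt succeeds
            raise loopingcall.LoopingCallDone(retvalue=True)


    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    succeeded = timer.start(interval=0.5).wait()   # blocks until Done is raised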
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.087650] env[62974]: DEBUG nova.objects.instance [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lazy-loading 'numa_topology' on Instance uuid 366b5816-a847-48d1-ad03-5758e473a9d0 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.091351] env[62974]: DEBUG nova.compute.manager [-] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 871.091351] env[62974]: DEBUG nova.network.neutron [-] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.170412] env[62974]: DEBUG nova.policy [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e07ae60010640d88de0d3b716914186', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd914830aaf454e26b77cbb46722764ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 871.396243] env[62974]: INFO nova.compute.manager [-] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Took 1.34 seconds to deallocate network for instance. [ 871.556872] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.579641] env[62974]: DEBUG nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 871.593956] env[62974]: INFO nova.compute.claims [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.603070] env[62974]: DEBUG nova.network.neutron [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Successfully created port: 0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 871.908871] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.026636] env[62974]: DEBUG nova.network.neutron [-] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.439855] env[62974]: DEBUG nova.compute.manager [req-d1c0b1dd-2e1f-4cfe-a221-2b4a4c107d61 req-073d32fd-6b94-4753-9afa-9cc36fc120d7 service nova] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Received event network-vif-deleted-30f39769-41ea-4d00-81eb-e86870ef4bae {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 872.530445] env[62974]: INFO nova.compute.manager [-] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Took 1.44 seconds to deallocate network for instance. [ 872.589450] env[62974]: DEBUG nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 872.616467] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ecb60bd65a4c6e4840cbb01d5594a1fa',container_format='bare',created_at=2025-02-19T03:55:30Z,direct_url=,disk_format='vmdk',id=dc9ab7c2-bb31-4c76-aa09-3ecc18818c83,min_disk=1,min_ram=0,name='tempest-test-snap-1052220571',owner='d914830aaf454e26b77cbb46722764ba',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-02-19T03:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 872.616797] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.616992] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 872.617197] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.617341] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 872.617486] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 872.617687] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 872.617864] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 872.618062] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Got 1 possible topologies 
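Annotation (not part of the captured log): the hardware records above enumerate possible CPU topologies for the 1-vCPU m1.nano flavor with no socket/core/thread limits and end up with exactly one candidate. A simplified illustration of that enumeration (not nova.virt.hardware's actual code):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate (sockets, cores, threads) splits of a vCPU count."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found


    print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"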
{{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 872.618230] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 872.618399] env[62974]: DEBUG nova.virt.hardware [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 872.619327] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aeafe0a-4694-4fce-9aa3-fa0d1e09e98e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.630713] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a139bf5-8a8d-4364-9d3b-e3e4144902ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.003381] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbc23f6-a7e0-45c9-8297-3f9fe00cfd83 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.012210] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e7a1c6-03d5-49c2-b8f4-ad64af0b8278 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.048926] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.053088] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2f6b0d-b2b0-4aa9-b570-7b0247474148 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.063509] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0e66fa-5e02-4cd9-a69b-a7e60c3d6fb8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.081409] env[62974]: DEBUG nova.compute.provider_tree [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.108794] env[62974]: DEBUG nova.network.neutron [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Successfully updated port: 0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
873.584509] env[62974]: DEBUG nova.scheduler.client.report [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.611454] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "refresh_cache-70adaccf-44ab-44b1-ac8a-005d42c09f0a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.611623] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "refresh_cache-70adaccf-44ab-44b1-ac8a-005d42c09f0a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.611773] env[62974]: DEBUG nova.network.neutron [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.090510] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.519s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.092886] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.092s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.093145] env[62974]: DEBUG nova.objects.instance [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lazy-loading 'resources' on Instance uuid 6243cce3-8611-46fa-8379-e2f3c825c4dd {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.152030] env[62974]: INFO nova.network.neutron [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating port 07b0aa8b-b38d-489b-9998-6efe6126083f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 874.169635] env[62974]: DEBUG nova.network.neutron [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 
tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.408654] env[62974]: DEBUG nova.network.neutron [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Updating instance_info_cache with network_info: [{"id": "0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9", "address": "fa:16:3e:6c:85:a2", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cccd0c9-28", "ovs_interfaceid": "0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.472497] env[62974]: DEBUG nova.compute.manager [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Received event network-vif-plugged-0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 874.472606] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] Acquiring lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.472790] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.472954] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.473408] env[62974]: DEBUG nova.compute.manager [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] No waiting 
events found dispatching network-vif-plugged-0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 874.473611] env[62974]: WARNING nova.compute.manager [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Received unexpected event network-vif-plugged-0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 for instance with vm_state building and task_state spawning. [ 874.473778] env[62974]: DEBUG nova.compute.manager [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Received event network-changed-0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 874.473936] env[62974]: DEBUG nova.compute.manager [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Refreshing instance network info cache due to event network-changed-0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 874.474125] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] Acquiring lock "refresh_cache-70adaccf-44ab-44b1-ac8a-005d42c09f0a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.913037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "refresh_cache-70adaccf-44ab-44b1-ac8a-005d42c09f0a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.913342] env[62974]: DEBUG nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Instance network_info: |[{"id": "0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9", "address": "fa:16:3e:6c:85:a2", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cccd0c9-28", "ovs_interfaceid": "0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 874.913844] env[62974]: DEBUG oslo_concurrency.lockutils 
[req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] Acquired lock "refresh_cache-70adaccf-44ab-44b1-ac8a-005d42c09f0a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.914045] env[62974]: DEBUG nova.network.neutron [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Refreshing network info cache for port 0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.915253] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:85:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06cc7c49-c46c-4c1e-bf51-77e9ea802c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.923211] env[62974]: DEBUG oslo.service.loopingcall [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.923687] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.923956] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91c1d118-e65b-4531-a0e1-4732b0775f2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.946409] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.946409] env[62974]: value = "task-2654531" [ 874.946409] env[62974]: _type = "Task" [ 874.946409] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.960528] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654531, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.002421] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e79332-4154-4a37-ab58-d3f0e2cb6fb8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.010093] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a0ef2c-b54a-47b4-8e2a-dcf35e838168 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.042978] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e01f63d-5b7f-41f8-8d6b-af65300e2057 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.051711] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca3bf0c-357b-47f4-8412-aef69c494939 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.068037] env[62974]: DEBUG nova.compute.provider_tree [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.457581] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654531, 'name': CreateVM_Task, 'duration_secs': 0.468204} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.457900] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.460678] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.460891] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.461236] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 875.461509] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39fc5336-a567-40f8-aa49-375a66339517 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.466199] env[62974]: DEBUG oslo_vmware.api [None 
req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 875.466199] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5c18a-6c5e-3c07-5472-61e5fcbf656c" [ 875.466199] env[62974]: _type = "Task" [ 875.466199] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.473932] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5c18a-6c5e-3c07-5472-61e5fcbf656c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.571260] env[62974]: DEBUG nova.scheduler.client.report [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.706894] env[62974]: DEBUG nova.network.neutron [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Updated VIF entry in instance network info cache for port 0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.707189] env[62974]: DEBUG nova.network.neutron [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Updating instance_info_cache with network_info: [{"id": "0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9", "address": "fa:16:3e:6c:85:a2", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cccd0c9-28", "ovs_interfaceid": "0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.837813] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.837813] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.837813] env[62974]: DEBUG nova.network.neutron [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.977414] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.977691] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Processing image dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.977972] env[62974]: DEBUG 
oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.978092] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.978289] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.978547] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2eb9b2ea-26ad-46e9-a90f-0432af38d6d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.991189] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.991189] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.991427] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b3f0102-a91d-4b85-8c7d-d9a4aefc690a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.996789] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 875.996789] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52552306-7b8d-5010-9a42-d25966743fa7" [ 875.996789] env[62974]: _type = "Task" [ 875.996789] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.006284] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52552306-7b8d-5010-9a42-d25966743fa7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.076931] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.984s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.079508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.893s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.079890] env[62974]: DEBUG nova.objects.instance [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lazy-loading 'resources' on Instance uuid e42547b0-25b7-4a34-b832-b93103065928 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 876.102419] env[62974]: INFO nova.scheduler.client.report [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Deleted allocations for instance 6243cce3-8611-46fa-8379-e2f3c825c4dd [ 876.210132] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1c5cc62-c8ee-40b9-8a4d-11b157b6a365 req-bd66a17f-48b5-46dc-b1e8-031e7b1ca8f9 service nova] Releasing lock "refresh_cache-70adaccf-44ab-44b1-ac8a-005d42c09f0a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.507554] env[62974]: DEBUG nova.compute.manager [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received event network-vif-plugged-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 876.507996] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.508439] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.508599] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.508803] env[62974]: DEBUG 
nova.compute.manager [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] No waiting events found dispatching network-vif-plugged-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 876.509133] env[62974]: WARNING nova.compute.manager [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received unexpected event network-vif-plugged-07b0aa8b-b38d-489b-9998-6efe6126083f for instance with vm_state shelved_offloaded and task_state spawning. [ 876.509425] env[62974]: DEBUG nova.compute.manager [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received event network-changed-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 876.509706] env[62974]: DEBUG nova.compute.manager [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Refreshing instance network info cache due to event network-changed-07b0aa8b-b38d-489b-9998-6efe6126083f. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 876.509995] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] Acquiring lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.519904] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Preparing fetch location {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 876.520275] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Fetch image to [datastore2] OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47/OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47.vmdk {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 876.520574] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Downloading stream optimized image dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 to [datastore2] OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47/OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47.vmdk on the data store datastore2 as vApp {{(pid=62974) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 876.520970] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Downloading image file data dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 to the ESX as VM named 'OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47' {{(pid=62974) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 876.602421] env[62974]: DEBUG nova.network.neutron [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": "07b0aa8b-b38d-489b-9998-6efe6126083f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.606740] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 876.606740] env[62974]: value = "resgroup-9" [ 876.606740] env[62974]: _type = "ResourcePool" [ 876.606740] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 876.607144] env[62974]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-45bfe9c9-1ad7-4190-8a40-169a8bdeffd1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.632789] env[62974]: DEBUG oslo_concurrency.lockutils [None req-13a8978f-5b54-4628-90f2-d5d74669d7bd tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6243cce3-8611-46fa-8379-e2f3c825c4dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.543s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.639492] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease: (returnval){ [ 876.639492] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c95502-9482-f87e-1e9e-5e46e23d3cfa" [ 876.639492] env[62974]: _type = "HttpNfcLease" [ 876.639492] env[62974]: } obtained for vApp import into resource pool (val){ [ 876.639492] env[62974]: value = "resgroup-9" [ 876.639492] env[62974]: _type = "ResourcePool" [ 876.639492] env[62974]: }. 
{{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 876.639762] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the lease: (returnval){ [ 876.639762] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c95502-9482-f87e-1e9e-5e46e23d3cfa" [ 876.639762] env[62974]: _type = "HttpNfcLease" [ 876.639762] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 876.649551] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 876.649551] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c95502-9482-f87e-1e9e-5e46e23d3cfa" [ 876.649551] env[62974]: _type = "HttpNfcLease" [ 876.649551] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 877.004815] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c469b86-753c-4ad7-b8b6-d840f5b16355 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.013485] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad77cbf-8fc5-4c8f-8f8c-064016af94de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.047250] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8f3831-03d5-4a69-9ef2-54bde504a2a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.055414] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a970fd48-267a-45fa-83f0-8dca46079587 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.070134] env[62974]: DEBUG nova.compute.provider_tree [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.105583] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.108424] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] Acquired lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.108679] env[62974]: DEBUG nova.network.neutron [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Refreshing network info cache for port 07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 877.123695] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.123969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.148449] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 877.148449] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c95502-9482-f87e-1e9e-5e46e23d3cfa" [ 877.148449] env[62974]: _type = "HttpNfcLease" [ 877.148449] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 877.330069] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='90547b77b760d6ffe3fbf2fb9feea18b',container_format='bare',created_at=2025-02-19T03:54:59Z,direct_url=,disk_format='vmdk',id=a51f6776-a571-4d03-938a-5a97a88c6d55,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1860511717-shelved',owner='57e631c2e78a4391bceb20072992f8bd',properties=ImageMetaProps,protected=,size=31663104,status='active',tags=,updated_at=2025-02-19T03:55:18Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.330340] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.330494] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.330673] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.330816] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 
tempest-ServersNegativeTestJSON-1132372439-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.331344] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.332026] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.332301] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.332502] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.332676] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.332856] env[62974]: DEBUG nova.virt.hardware [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.335422] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8791243-28f7-4b9c-be8a-049e10bd9ab4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.344863] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520fef9c-aff4-7514-5cbb-49559cde2b19/disk-0.vmdk. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 877.346199] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfc46fd-a1dc-43c1-b944-5085ee5b71f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.350749] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fa7527-1085-48b0-8700-fcee30a5d20c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.364622] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:15:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f54f7284-8f7d-47ee-839d-2143062cfe44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07b0aa8b-b38d-489b-9998-6efe6126083f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.372264] env[62974]: DEBUG oslo.service.loopingcall [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.374027] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.374767] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520fef9c-aff4-7514-5cbb-49559cde2b19/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 877.374767] env[62974]: ERROR oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520fef9c-aff4-7514-5cbb-49559cde2b19/disk-0.vmdk due to incomplete transfer. [ 877.374767] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f382d356-28ee-4fb3-b39a-a26dba549f8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.390036] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bcab8a7d-84a5-4ef4-ba12-3d624212ce98 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.396152] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.396152] env[62974]: value = "task-2654533" [ 877.396152] env[62974]: _type = "Task" [ 877.396152] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.399880] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520fef9c-aff4-7514-5cbb-49559cde2b19/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 877.400086] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Uploaded image 51282f4d-262b-45c5-b475-115919afa115 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 877.402254] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 877.402744] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3667c650-cb79-4224-b555-219d588d4de1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.406579] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654533, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.410999] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 877.410999] env[62974]: value = "task-2654534" [ 877.410999] env[62974]: _type = "Task" [ 877.410999] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.418273] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654534, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.573989] env[62974]: DEBUG nova.scheduler.client.report [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.626540] env[62974]: DEBUG nova.compute.utils [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.652060] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 877.652060] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c95502-9482-f87e-1e9e-5e46e23d3cfa" [ 877.652060] env[62974]: _type = "HttpNfcLease" [ 877.652060] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 877.652060] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 877.652060] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c95502-9482-f87e-1e9e-5e46e23d3cfa" [ 877.652060] env[62974]: _type = "HttpNfcLease" [ 877.652060] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 877.652818] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b061019-7200-4fc8-abc3-ba8a334ef67b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.661701] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52aeeeff-4b82-5556-fb5d-c596b4329ff7/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 877.661947] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52aeeeff-4b82-5556-fb5d-c596b4329ff7/disk-0.vmdk. 
{{(pid=62974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 877.720365] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.720669] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.720822] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.721000] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.721185] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.725396] env[62974]: INFO nova.compute.manager [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Terminating instance [ 877.731510] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e07d25db-0335-478f-8fe4-e3ec90971286 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.861679] env[62974]: DEBUG nova.network.neutron [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updated VIF entry in instance network info cache for port 07b0aa8b-b38d-489b-9998-6efe6126083f. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 877.862091] env[62974]: DEBUG nova.network.neutron [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": "07b0aa8b-b38d-489b-9998-6efe6126083f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.908072] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654533, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.920431] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654534, 'name': Destroy_Task, 'duration_secs': 0.329} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.922965] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Destroyed the VM [ 877.923236] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 877.923487] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-88e288b4-192c-4ad4-86a0-9a6a158f500c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.929954] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 877.929954] env[62974]: value = "task-2654535" [ 877.929954] env[62974]: _type = "Task" [ 877.929954] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.938294] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654535, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.079257] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.083836] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.751s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.084107] env[62974]: DEBUG nova.objects.instance [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lazy-loading 'resources' on Instance uuid eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 878.106557] env[62974]: INFO nova.scheduler.client.report [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleted allocations for instance e42547b0-25b7-4a34-b832-b93103065928 [ 878.129832] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.229898] env[62974]: DEBUG nova.compute.manager [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.230183] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.231073] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50501022-5d3e-43b1-ab85-7ab8edcf1037 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.238879] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.239119] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba151414-b69d-40cb-a748-961ded7bdbac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.245108] env[62974]: DEBUG oslo_vmware.api [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 878.245108] env[62974]: value = "task-2654536" [ 878.245108] env[62974]: _type = "Task" [ 878.245108] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.252573] env[62974]: DEBUG oslo_vmware.api [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.365408] env[62974]: DEBUG oslo_concurrency.lockutils [req-a1b5c448-67e1-45bb-b168-1909afa30e43 req-8d8ac6dd-ba98-4a79-b27c-150d7011d927 service nova] Releasing lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.408741] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654533, 'name': CreateVM_Task, 'duration_secs': 0.814385} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.408741] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.411032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.411172] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.411471] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 878.411789] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66de932c-ede1-4018-87d0-77798d50d814 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.416845] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 878.416845] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52932cd8-abce-d205-bd3a-b6108708a894" [ 878.416845] env[62974]: _type = "Task" [ 878.416845] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.429373] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52932cd8-abce-d205-bd3a-b6108708a894, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.440345] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654535, 'name': RemoveSnapshot_Task, 'duration_secs': 0.416867} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.442519] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 878.442871] env[62974]: DEBUG nova.compute.manager [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 878.443886] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d02eb0b-cb58-4da0-888a-cc9c68c8c973 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.455088] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Completed reading data from the image iterator. {{(pid=62974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 878.456108] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52aeeeff-4b82-5556-fb5d-c596b4329ff7/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 878.457501] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51d6d15-a31d-44b8-916b-76d770ee6879 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.464914] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52aeeeff-4b82-5556-fb5d-c596b4329ff7/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 878.465209] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52aeeeff-4b82-5556-fb5d-c596b4329ff7/disk-0.vmdk. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 878.465543] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-86570638-4d38-41e0-82e0-2d5ddeaf5971 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.619506] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a94237aa-5ab1-4ac0-87c1-f27deb45f38c tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "e42547b0-25b7-4a34-b832-b93103065928" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.539s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.666292] env[62974]: DEBUG oslo_vmware.rw_handles [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52aeeeff-4b82-5556-fb5d-c596b4329ff7/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 878.666627] env[62974]: INFO nova.virt.vmwareapi.images [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Downloaded image file data dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 [ 878.667462] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c92f79-bf15-44db-af37-2d9844475adf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.686453] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8e8472d-44c7-41c1-ae8d-38072b2395a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.718679] env[62974]: INFO nova.virt.vmwareapi.images [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] The imported VM was unregistered [ 878.720354] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Caching image {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 878.720645] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating directory with path [datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.721460] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1c123bc-a946-451a-91c4-f866a8108f88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.732555] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created directory with 
path [datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83 {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.732555] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47/OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47.vmdk to [datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83.vmdk. {{(pid=62974) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 878.732555] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ffea1a67-52e9-445e-8124-4ce548bf9955 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.738573] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 878.738573] env[62974]: value = "task-2654538" [ 878.738573] env[62974]: _type = "Task" [ 878.738573] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.752492] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654538, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.758940] env[62974]: DEBUG oslo_vmware.api [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654536, 'name': PowerOffVM_Task, 'duration_secs': 0.213338} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.759297] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.759488] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.760408] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cc6b878-dd7a-4b79-931f-0ab4188d2dad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.823135] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.823382] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.823639] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Deleting the datastore file [datastore1] 6928b412-e8cb-42fb-bc47-dc8498f12ad1 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.823847] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74e18192-d270-4f77-98b5-36b2990d7d67 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.830621] env[62974]: DEBUG oslo_vmware.api [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for the task: (returnval){ [ 878.830621] env[62974]: value = "task-2654540" [ 878.830621] env[62974]: _type = "Task" [ 878.830621] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.841680] env[62974]: DEBUG oslo_vmware.api [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654540, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.930960] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.931269] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Processing image a51f6776-a571-4d03-938a-5a97a88c6d55 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.931518] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55/a51f6776-a571-4d03-938a-5a97a88c6d55.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.931674] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55/a51f6776-a571-4d03-938a-5a97a88c6d55.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.931853] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.932154] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13dce45d-f419-4361-8246-90998f5313a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.959998] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.960128] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.961220] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a95ea299-6a9e-4033-9bcf-a66804dcebdc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.968033] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f18643a-2bef-423b-bd06-f036a8b93c39 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.972591] env[62974]: INFO nova.compute.manager [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Shelve offloading [ 878.978658] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 878.978658] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52389a0d-bec8-52a2-ab39-d5e21ebad8af" [ 878.978658] env[62974]: _type = "Task" [ 878.978658] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.987640] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e15c9ed-aed7-4c7b-939d-c42c871050ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.025510] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ca0cbf-00bc-4128-af2d-a0dfac12c7cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.028809] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Preparing fetch location {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 879.029125] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Fetch image to [datastore2] OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917/OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917.vmdk {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 879.029318] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Downloading stream optimized image a51f6776-a571-4d03-938a-5a97a88c6d55 to [datastore2] OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917/OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917.vmdk on the data store datastore2 as vApp {{(pid=62974) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 879.029487] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 
tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Downloading image file data a51f6776-a571-4d03-938a-5a97a88c6d55 to the ESX as VM named 'OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917' {{(pid=62974) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 879.040534] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3124e0-df84-45fe-aca9-fe75b7731313 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.055960] env[62974]: DEBUG nova.compute.provider_tree [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.122334] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 879.122334] env[62974]: value = "resgroup-9" [ 879.122334] env[62974]: _type = "ResourcePool" [ 879.122334] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 879.122936] env[62974]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-912c0fe2-23d3-4b68-b16d-66395b4e9009 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.151357] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lease: (returnval){ [ 879.151357] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 879.151357] env[62974]: _type = "HttpNfcLease" [ 879.151357] env[62974]: } obtained for vApp import into resource pool (val){ [ 879.151357] env[62974]: value = "resgroup-9" [ 879.151357] env[62974]: _type = "ResourcePool" [ 879.151357] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 879.151878] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the lease: (returnval){ [ 879.151878] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 879.151878] env[62974]: _type = "HttpNfcLease" [ 879.151878] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 879.161716] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 879.161716] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 879.161716] env[62974]: _type = "HttpNfcLease" [ 879.161716] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 879.203717] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.203991] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.204433] env[62974]: INFO nova.compute.manager [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Attaching volume e582231b-0f13-489f-96dd-9dd8e2561572 to /dev/sdb [ 879.249380] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f91d537-dc04-4efb-8dba-8f5ae7d7fef6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.259068] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654538, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.260987] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73f1309-051b-4951-bed3-2c11c51f332f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.276755] env[62974]: DEBUG nova.virt.block_device [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating existing volume attachment record: 46bbb1b0-1338-47e1-a854-df74d8971e86 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 879.350026] env[62974]: DEBUG oslo_vmware.api [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Task: {'id': task-2654540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176093} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.350026] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.350026] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.350026] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.350026] env[62974]: INFO nova.compute.manager [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 879.350694] env[62974]: DEBUG oslo.service.loopingcall [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.350694] env[62974]: DEBUG nova.compute.manager [-] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 879.350694] env[62974]: DEBUG nova.network.neutron [-] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.481297] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.481297] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0bd8e6f-0e8e-431d-9a63-a9a6eccef869 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.486681] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 879.486681] env[62974]: value = "task-2654543" [ 879.486681] env[62974]: _type = "Task" [ 879.486681] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.498302] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 879.498513] env[62974]: DEBUG nova.compute.manager [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.499371] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5f14a1-eafb-4775-8c8c-efde2856f526 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.507291] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.507456] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.507624] env[62974]: DEBUG nova.network.neutron [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.561324] env[62974]: DEBUG nova.scheduler.client.report [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 879.664407] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 879.664407] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 879.664407] env[62974]: _type = "HttpNfcLease" [ 879.664407] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 879.753583] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654538, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.808190] env[62974]: DEBUG nova.compute.manager [req-c85e31fe-2fb8-4f6e-bf15-766a37114234 req-0821f344-440e-403e-a241-9306818abaea service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Received event network-vif-deleted-618880a5-40af-4192-80d0-09a7533719d1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 879.808190] env[62974]: INFO nova.compute.manager [req-c85e31fe-2fb8-4f6e-bf15-766a37114234 req-0821f344-440e-403e-a241-9306818abaea service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Neutron deleted interface 618880a5-40af-4192-80d0-09a7533719d1; detaching it from the instance and deleting it from the info cache [ 879.808190] env[62974]: DEBUG nova.network.neutron [req-c85e31fe-2fb8-4f6e-bf15-766a37114234 req-0821f344-440e-403e-a241-9306818abaea service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.928929] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.929236] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.929454] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.929635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.929802] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 
tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.932209] env[62974]: INFO nova.compute.manager [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Terminating instance [ 880.066893] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.983s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.069686] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.298s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.071516] env[62974]: INFO nova.compute.claims [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.096569] env[62974]: INFO nova.scheduler.client.report [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleted allocations for instance eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26 [ 880.155733] env[62974]: DEBUG nova.network.neutron [-] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.167659] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 880.167659] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 880.167659] env[62974]: _type = "HttpNfcLease" [ 880.167659] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 880.254735] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654538, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.278707] env[62974]: DEBUG nova.network.neutron [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Updating instance_info_cache with network_info: [{"id": "87b5b3a5-74b0-4465-a533-043f1f583030", "address": "fa:16:3e:4e:16:55", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b5b3a5-74", "ovs_interfaceid": "87b5b3a5-74b0-4465-a533-043f1f583030", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.312576] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c97d3266-8d3b-466c-a271-17b1e8de6979 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.323646] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe3d608-1d0c-44aa-918e-ce34dac9d628 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.364556] env[62974]: DEBUG nova.compute.manager [req-c85e31fe-2fb8-4f6e-bf15-766a37114234 req-0821f344-440e-403e-a241-9306818abaea service nova] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Detach interface failed, port_id=618880a5-40af-4192-80d0-09a7533719d1, reason: Instance 6928b412-e8cb-42fb-bc47-dc8498f12ad1 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 880.437220] env[62974]: DEBUG nova.compute.manager [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 880.437398] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.438403] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ce70c4-228a-49e7-ae8a-648c02d98e5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.448024] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.448024] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f14da5f3-492c-4f70-9349-393b255dde99 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.454562] env[62974]: DEBUG oslo_vmware.api [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 880.454562] env[62974]: value = "task-2654546" [ 880.454562] env[62974]: _type = "Task" [ 880.454562] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.465223] env[62974]: DEBUG oslo_vmware.api [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654546, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.606346] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a0fc265-b70e-4ef0-b87a-7e63a4889c03 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.671s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.663202] env[62974]: INFO nova.compute.manager [-] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Took 1.31 seconds to deallocate network for instance. [ 880.671443] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 880.671443] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 880.671443] env[62974]: _type = "HttpNfcLease" [ 880.671443] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 880.753495] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654538, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.781472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.869764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.870963] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.966520] env[62974]: DEBUG oslo_vmware.api [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654546, 'name': PowerOffVM_Task, 'duration_secs': 0.313431} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.966520] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 880.966751] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 880.967580] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b36639a8-500b-4ed0-9a12-3d2b471ee01b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.041596] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.042240] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Deleting contents of the VM 
from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.042499] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Deleting the datastore file [datastore1] c38cddae-95b3-4f4a-bf3a-5f0bdde548a9 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.043026] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8db79f2c-18ad-45e0-846f-8d0f3f69cc2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.050968] env[62974]: DEBUG oslo_vmware.api [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for the task: (returnval){ [ 881.050968] env[62974]: value = "task-2654548" [ 881.050968] env[62974]: _type = "Task" [ 881.050968] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.064640] env[62974]: DEBUG oslo_vmware.api [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.106779] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.107888] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b827c0-726d-43b1-ad85-9e8c12ae56b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.119892] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 881.120254] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2f6f986-e1c0-4445-aedc-4d64c3de0075 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.168529] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 881.168529] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 881.168529] env[62974]: _type = "HttpNfcLease" [ 881.168529] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 881.173255] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.210089] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.210379] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.210519] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleting the datastore file [datastore1] 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.213823] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71b004d3-cf90-4cbf-b554-881b912982be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.215881] env[62974]: DEBUG nova.compute.manager [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.216951] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49517427-9059-4437-a60e-48b316dfe697 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.229953] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 881.229953] env[62974]: value = "task-2654550" [ 881.229953] env[62974]: _type = "Task" [ 881.229953] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.239428] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654550, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.252544] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654538, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.372493] env[62974]: DEBUG nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 881.453426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77bbaeb-7dc9-46d1-8079-9c307a80f7de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.461343] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6eaa91b-af21-4cbf-bcd3-dbe05c8ac41b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.492072] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003f7690-785f-461d-97b7-05ea698dfafa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.499202] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f739faba-5369-4bdf-81db-898f4dbb77c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.512306] env[62974]: DEBUG nova.compute.provider_tree [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.561247] env[62974]: DEBUG oslo_vmware.api [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654548, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.597564] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.597843] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.598089] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "b31dea29-79d6-4117-bdb5-2d38fb660a53-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.598286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.598500] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.600761] env[62974]: INFO nova.compute.manager [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Terminating instance [ 881.665717] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 881.665717] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 881.665717] env[62974]: _type = "HttpNfcLease" [ 881.665717] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 881.732744] env[62974]: INFO nova.compute.manager [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] instance snapshotting [ 881.733433] env[62974]: DEBUG nova.objects.instance [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'flavor' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.744069] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654550, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.752358] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654538, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.608356} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.753290] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47/OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47.vmdk to [datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83.vmdk. [ 881.753493] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Cleaning up location [datastore2] OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 881.753652] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_9aed4cb5-4ad4-41be-90e1-79d473a8cd47 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.753919] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdb3d5fa-c5ef-4eb7-b8bf-2362e56314cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.760984] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 881.760984] env[62974]: value = "task-2654551" [ 881.760984] env[62974]: _type = "Task" [ 881.760984] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.769545] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654551, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.836338] env[62974]: DEBUG nova.compute.manager [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Received event network-vif-unplugged-87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 881.836558] env[62974]: DEBUG oslo_concurrency.lockutils [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] Acquiring lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.836890] env[62974]: DEBUG oslo_concurrency.lockutils [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.837145] env[62974]: DEBUG oslo_concurrency.lockutils [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.837342] env[62974]: DEBUG nova.compute.manager [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] No waiting events found dispatching network-vif-unplugged-87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 881.837523] env[62974]: WARNING nova.compute.manager [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Received unexpected event network-vif-unplugged-87b5b3a5-74b0-4465-a533-043f1f583030 for instance with vm_state shelved and task_state shelving_offloading. [ 881.837699] env[62974]: DEBUG nova.compute.manager [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Received event network-changed-87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 881.837871] env[62974]: DEBUG nova.compute.manager [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Refreshing instance network info cache due to event network-changed-87b5b3a5-74b0-4465-a533-043f1f583030. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 881.838092] env[62974]: DEBUG oslo_concurrency.lockutils [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] Acquiring lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.838250] env[62974]: DEBUG oslo_concurrency.lockutils [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] Acquired lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.839036] env[62974]: DEBUG nova.network.neutron [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Refreshing network info cache for port 87b5b3a5-74b0-4465-a533-043f1f583030 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.891808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.015229] env[62974]: DEBUG nova.scheduler.client.report [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.061460] env[62974]: DEBUG oslo_vmware.api [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Task: {'id': task-2654548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.934498} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.061764] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.061954] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.062659] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.062659] env[62974]: INFO nova.compute.manager [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Took 1.62 seconds to destroy the instance on the hypervisor. [ 882.062659] env[62974]: DEBUG oslo.service.loopingcall [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.062810] env[62974]: DEBUG nova.compute.manager [-] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 882.062845] env[62974]: DEBUG nova.network.neutron [-] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 882.105146] env[62974]: DEBUG nova.compute.manager [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 882.105393] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 882.106646] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423423ff-c284-4587-9373-bc2a61785f8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.114400] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 882.114670] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9363e8c5-eec7-4c52-904d-0b4e572e3fd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.121650] env[62974]: DEBUG oslo_vmware.api [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 882.121650] env[62974]: value = "task-2654552" [ 882.121650] env[62974]: _type = "Task" [ 882.121650] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.130558] env[62974]: DEBUG oslo_vmware.api [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.168301] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 882.168301] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 882.168301] env[62974]: _type = "HttpNfcLease" [ 882.168301] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 882.168831] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 882.168831] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526dc742-e350-58a1-f267-852cc68e9af3" [ 882.168831] env[62974]: _type = "HttpNfcLease" [ 882.168831] env[62974]: }. 
{{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 882.169813] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1855d720-086a-4e6b-9979-6aef2b3884d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.180685] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a975a5-ec9c-f82a-8889-0aab8e58a27b/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 882.180685] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating HTTP connection to write to file with size = 31663104 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a975a5-ec9c-f82a-8889-0aab8e58a27b/disk-0.vmdk. {{(pid=62974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 882.254301] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e224288c-274d-4eb2-a23c-f8b29076b428 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.265883] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-34c56b15-e110-4239-ae4a-c5d7301ffa7f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.289355] env[62974]: DEBUG oslo_vmware.api [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.796819} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.291130] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce18259-db6e-41c3-b0a2-154c6274fa2a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.294155] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.294398] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.294600] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.302020] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111733} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.302815] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.303050] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.303366] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83.vmdk to [datastore2] 70adaccf-44ab-44b1-ac8a-005d42c09f0a/70adaccf-44ab-44b1-ac8a-005d42c09f0a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.303650] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a8bf89d-cd94-4bcd-8997-225dbba9e4b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.316208] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 882.316208] env[62974]: value = 
"task-2654553" [ 882.316208] env[62974]: _type = "Task" [ 882.316208] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.330027] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654553, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.330027] env[62974]: INFO nova.scheduler.client.report [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted allocations for instance 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7 [ 882.520647] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.521437] env[62974]: DEBUG nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 882.528121] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.467s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.528121] env[62974]: INFO nova.compute.claims [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.634390] env[62974]: DEBUG oslo_vmware.api [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654552, 'name': PowerOffVM_Task, 'duration_secs': 0.240941} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.636472] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 882.636668] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 882.636999] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d877114-a45f-438d-b8f6-a09c5416c8c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.672219] env[62974]: DEBUG nova.network.neutron [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Updated VIF entry in instance network info cache for port 87b5b3a5-74b0-4465-a533-043f1f583030. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 882.672717] env[62974]: DEBUG nova.network.neutron [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Updating instance_info_cache with network_info: [{"id": "87b5b3a5-74b0-4465-a533-043f1f583030", "address": "fa:16:3e:4e:16:55", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": null, "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap87b5b3a5-74", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.710259] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 882.710259] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 882.710259] env[62974]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleting the datastore file [datastore1] b31dea29-79d6-4117-bdb5-2d38fb660a53 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.710582] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca9ca3a4-4656-4bc3-aa4e-4af8c8ed5884 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.718457] env[62974]: DEBUG oslo_vmware.api [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for the task: (returnval){ [ 882.718457] env[62974]: value = "task-2654555" [ 882.718457] env[62974]: _type = "Task" [ 882.718457] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.735886] env[62974]: DEBUG oslo_vmware.api [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654555, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.813427] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 882.813777] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7fc149d5-da45-4437-aefe-f1b65395a178 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.824846] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 882.824846] env[62974]: value = "task-2654556" [ 882.824846] env[62974]: _type = "Task" [ 882.824846] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.837560] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.837888] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654553, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.843851] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654556, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.032241] env[62974]: DEBUG nova.compute.utils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 883.037848] env[62974]: DEBUG nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 883.037848] env[62974]: DEBUG nova.network.neutron [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.083184] env[62974]: DEBUG nova.policy [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '033399fccc4a453e97b3a6b5283f4371', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd36dea0d5474cc3a836975e86a1dd07', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 883.177620] env[62974]: DEBUG oslo_concurrency.lockutils [req-9dbf68de-4b79-4f28-b6cf-75593678bc51 req-a9484071-3e79-4cdd-b715-aa5128c439ba service nova] Releasing lock "refresh_cache-6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.233033] env[62974]: DEBUG nova.network.neutron [-] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.233033] env[62974]: DEBUG oslo_vmware.api [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Task: {'id': task-2654555, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24165} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.233033] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.233033] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 883.234452] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 883.234965] env[62974]: INFO nova.compute.manager [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Took 1.13 seconds to destroy the instance on the hypervisor. [ 883.235368] env[62974]: DEBUG oslo.service.loopingcall [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.236158] env[62974]: DEBUG nova.compute.manager [-] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 883.236403] env[62974]: DEBUG nova.network.neutron [-] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 883.338163] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654553, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.345032] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654556, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.541182] env[62974]: DEBUG nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 883.550215] env[62974]: DEBUG nova.network.neutron [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Successfully created port: 31c4426e-2cae-45ff-be26-c79cdd0db248 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.737803] env[62974]: INFO nova.compute.manager [-] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Took 1.67 seconds to deallocate network for instance. [ 883.839333] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654553, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.845322] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654556, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.926263] env[62974]: DEBUG nova.compute.manager [req-b9512fa4-29fa-49e4-bd7c-b26dceee1a2f req-96572a05-248d-4c97-90c0-9fa597d28d02 service nova] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Received event network-vif-deleted-1a9b17b6-73d0-4a26-aeb4-00390730c3b0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 883.926429] env[62974]: DEBUG nova.compute.manager [req-b9512fa4-29fa-49e4-bd7c-b26dceee1a2f req-96572a05-248d-4c97-90c0-9fa597d28d02 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Received event network-vif-deleted-abd131b8-9d9e-4230-b1d9-19c7a25bb78a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 883.926867] env[62974]: INFO nova.compute.manager [req-b9512fa4-29fa-49e4-bd7c-b26dceee1a2f req-96572a05-248d-4c97-90c0-9fa597d28d02 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Neutron deleted interface abd131b8-9d9e-4230-b1d9-19c7a25bb78a; detaching it from the instance and deleting it from the info cache [ 883.926991] env[62974]: DEBUG nova.network.neutron [req-b9512fa4-29fa-49e4-bd7c-b26dceee1a2f req-96572a05-248d-4c97-90c0-9fa597d28d02 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.932223] env[62974]: DEBUG oslo_concurrency.lockutils [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.932576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.023286] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023fc4ad-3bb6-441b-a844-2191de192fbb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.031722] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.039302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed61249e-cb9a-4488-87db-66c27daf62ea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.091932] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4d78c0-ddcb-4910-96f1-0e052ecdaf82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.102045] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87c3b7f-5c15-4b34-8a88-958cf903064c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.117474] env[62974]: DEBUG nova.compute.provider_tree [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.167137] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "c763d45b-44f0-4557-a726-7aad2bc58ba8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.168687] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.168687] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "c763d45b-44f0-4557-a726-7aad2bc58ba8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.168687] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 
tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.168687] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.170654] env[62974]: INFO nova.compute.manager [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Terminating instance [ 884.219486] env[62974]: DEBUG nova.network.neutron [-] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.245633] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.330501] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654553, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.340105] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654556, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.432151] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12202aa8-f7e7-4622-8320-1350a91fe5e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.435790] env[62974]: DEBUG nova.compute.utils [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.443851] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c94dd31-e319-4864-babe-dc1df1facc74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.483953] env[62974]: DEBUG nova.compute.manager [req-b9512fa4-29fa-49e4-bd7c-b26dceee1a2f req-96572a05-248d-4c97-90c0-9fa597d28d02 service nova] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Detach interface failed, port_id=abd131b8-9d9e-4230-b1d9-19c7a25bb78a, reason: Instance b31dea29-79d6-4117-bdb5-2d38fb660a53 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 884.593152] env[62974]: DEBUG nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 884.623028] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 884.623028] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.623028] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 884.623287] env[62974]: DEBUG nova.virt.hardware [None 
req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.623287] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 884.623287] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 884.623287] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 884.623287] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 884.623449] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 884.623482] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 884.623657] env[62974]: DEBUG nova.virt.hardware [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 884.624594] env[62974]: DEBUG nova.scheduler.client.report [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 884.629592] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e857d4-f4dd-4e4b-a939-8ce4a68d427b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.639503] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ccf8ac-aa93-42cb-87d5-7c260be5c520 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.658394] env[62974]: DEBUG nova.objects.instance [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lazy-loading 'flavor' on Instance uuid 12c769fb-8c9e-4089-9563-232cfad89b21 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 884.679393] env[62974]: DEBUG nova.compute.manager [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.679567] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.680708] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8db61f-4211-4fe9-a3be-5e314bb2dbf7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.688774] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.689089] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-436e7c11-f0cf-4bc5-aaba-ebbb25306b69 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.697674] env[62974]: DEBUG oslo_vmware.api [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 884.697674] env[62974]: value = "task-2654558" [ 884.697674] env[62974]: _type = "Task" [ 884.697674] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.706606] env[62974]: DEBUG oslo_vmware.api [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.723444] env[62974]: INFO nova.compute.manager [-] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Took 1.49 seconds to deallocate network for instance. 
[ 884.832105] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654553, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.4924} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.836524] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83/dc9ab7c2-bb31-4c76-aa09-3ecc18818c83.vmdk to [datastore2] 70adaccf-44ab-44b1-ac8a-005d42c09f0a/70adaccf-44ab-44b1-ac8a-005d42c09f0a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.837352] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76b14fd-c182-4378-9e8c-66c35cefe41d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.846454] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654556, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.866578] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 70adaccf-44ab-44b1-ac8a-005d42c09f0a/70adaccf-44ab-44b1-ac8a-005d42c09f0a.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.869133] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41c1ea2e-afa2-4562-9296-2562b12c663a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.893423] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 884.893423] env[62974]: value = "task-2654559" [ 884.893423] env[62974]: _type = "Task" [ 884.893423] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.904516] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654559, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.942552] env[62974]: DEBUG oslo_concurrency.lockutils [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.059363] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Completed reading data from the image iterator. {{(pid=62974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 885.059642] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a975a5-ec9c-f82a-8889-0aab8e58a27b/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 885.060707] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7d31f0-30b3-4ea3-947f-e6a60e28a273 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.067282] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a975a5-ec9c-f82a-8889-0aab8e58a27b/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 885.067454] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a975a5-ec9c-f82a-8889-0aab8e58a27b/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 885.067730] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6b8cf52c-4f16-4e72-9b02-6d7b11750b4c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.134073] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.608s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.134634] env[62974]: DEBUG nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 885.137430] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.517s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.138872] env[62974]: INFO nova.compute.claims [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.166185] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.166185] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquired lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.212032] env[62974]: DEBUG oslo_vmware.api [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654558, 'name': PowerOffVM_Task, 'duration_secs': 0.236126} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.212211] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.212488] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.213207] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1305316d-1657-4fe5-b37e-af897ba3a602 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.229802] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.275222] env[62974]: DEBUG nova.compute.manager [req-216bf680-76e3-405b-a36b-f93553af3b8f req-d1194f37-a01e-4e21-aaa1-8e1df8c4ebcf service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Received event network-vif-plugged-31c4426e-2cae-45ff-be26-c79cdd0db248 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 885.275469] env[62974]: DEBUG oslo_concurrency.lockutils [req-216bf680-76e3-405b-a36b-f93553af3b8f req-d1194f37-a01e-4e21-aaa1-8e1df8c4ebcf service nova] Acquiring lock "79448002-daa3-4afd-bd1b-36d734642a9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.275680] env[62974]: DEBUG oslo_concurrency.lockutils [req-216bf680-76e3-405b-a36b-f93553af3b8f req-d1194f37-a01e-4e21-aaa1-8e1df8c4ebcf service nova] Lock "79448002-daa3-4afd-bd1b-36d734642a9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.275889] env[62974]: DEBUG oslo_concurrency.lockutils [req-216bf680-76e3-405b-a36b-f93553af3b8f req-d1194f37-a01e-4e21-aaa1-8e1df8c4ebcf service nova] Lock "79448002-daa3-4afd-bd1b-36d734642a9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.276135] env[62974]: DEBUG nova.compute.manager [req-216bf680-76e3-405b-a36b-f93553af3b8f req-d1194f37-a01e-4e21-aaa1-8e1df8c4ebcf service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] No waiting events found dispatching network-vif-plugged-31c4426e-2cae-45ff-be26-c79cdd0db248 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.276342] env[62974]: WARNING nova.compute.manager [req-216bf680-76e3-405b-a36b-f93553af3b8f 
req-d1194f37-a01e-4e21-aaa1-8e1df8c4ebcf service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Received unexpected event network-vif-plugged-31c4426e-2cae-45ff-be26-c79cdd0db248 for instance with vm_state building and task_state spawning. [ 885.278501] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.278693] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.278874] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Deleting the datastore file [datastore1] c763d45b-44f0-4557-a726-7aad2bc58ba8 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.279509] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d8d84b8-aeb4-4420-8a12-df0341d8a5c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.287234] env[62974]: DEBUG oslo_vmware.api [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for the task: (returnval){ [ 885.287234] env[62974]: value = "task-2654561" [ 885.287234] env[62974]: _type = "Task" [ 885.287234] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.297575] env[62974]: DEBUG oslo_vmware.api [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.340952] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654556, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.361647] env[62974]: DEBUG oslo_vmware.rw_handles [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a975a5-ec9c-f82a-8889-0aab8e58a27b/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 885.361854] env[62974]: INFO nova.virt.vmwareapi.images [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Downloaded image file data a51f6776-a571-4d03-938a-5a97a88c6d55 [ 885.362667] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f0d569-42df-460b-85cd-f865e88ad576 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.378570] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd0cbadb-5f08-4f8b-92c8-8fecadb9189c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.401717] env[62974]: INFO nova.virt.vmwareapi.images [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] The imported VM was unregistered [ 885.404276] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Caching image {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 885.404520] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Creating directory with path [datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55 {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.407838] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd49d265-7010-46a6-9d59-cef3c9ac6cb8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.409810] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654559, 'name': ReconfigVM_Task, 'duration_secs': 0.468916} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.410106] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 70adaccf-44ab-44b1-ac8a-005d42c09f0a/70adaccf-44ab-44b1-ac8a-005d42c09f0a.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.411106] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80c84a5b-0685-49db-b007-de8c1ed96551 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.416682] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 885.416682] env[62974]: value = "task-2654563" [ 885.416682] env[62974]: _type = "Task" [ 885.416682] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.423025] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Created directory with path [datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55 {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.423025] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917/OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917.vmdk to [datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55/a51f6776-a571-4d03-938a-5a97a88c6d55.vmdk. {{(pid=62974) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 885.424662] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-798d20fc-581c-46d8-bc93-9999d82d82e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.426505] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654563, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.431406] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 885.431406] env[62974]: value = "task-2654564" [ 885.431406] env[62974]: _type = "Task" [ 885.431406] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.440520] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654564, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.644128] env[62974]: DEBUG nova.compute.utils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 885.648218] env[62974]: DEBUG nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 885.648377] env[62974]: DEBUG nova.network.neutron [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 885.684805] env[62974]: DEBUG nova.network.neutron [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Successfully updated port: 31c4426e-2cae-45ff-be26-c79cdd0db248 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 885.719897] env[62974]: DEBUG nova.policy [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84861fd0e88640529eb573045514dff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39e59f58f7c24529bfce4bcc18cc7925', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 885.797424] env[62974]: DEBUG oslo_vmware.api [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Task: {'id': task-2654561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195251} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.797695] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.797884] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.798074] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.798260] env[62974]: INFO nova.compute.manager [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 885.798495] env[62974]: DEBUG oslo.service.loopingcall [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.798691] env[62974]: DEBUG nova.compute.manager [-] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 885.799678] env[62974]: DEBUG nova.network.neutron [-] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 885.842438] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654556, 'name': CreateSnapshot_Task, 'duration_secs': 2.553528} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.842781] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 885.843570] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e004a0d5-e88d-43a4-ac4f-1646646c9459 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.933270] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654563, 'name': Rename_Task, 'duration_secs': 0.156406} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.933270] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.933270] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a17e3cb2-9c65-4123-a031-853b0c7bab4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.945549] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654564, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.946936] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 885.946936] env[62974]: value = "task-2654565" [ 885.946936] env[62974]: _type = "Task" [ 885.946936] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.959298] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654565, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.961878] env[62974]: DEBUG nova.compute.manager [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Received event network-changed-31c4426e-2cae-45ff-be26-c79cdd0db248 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 885.962608] env[62974]: DEBUG nova.compute.manager [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Refreshing instance network info cache due to event network-changed-31c4426e-2cae-45ff-be26-c79cdd0db248. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 885.964706] env[62974]: DEBUG oslo_concurrency.lockutils [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] Acquiring lock "refresh_cache-79448002-daa3-4afd-bd1b-36d734642a9e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.964706] env[62974]: DEBUG oslo_concurrency.lockutils [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] Acquired lock "refresh_cache-79448002-daa3-4afd-bd1b-36d734642a9e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.964706] env[62974]: DEBUG nova.network.neutron [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Refreshing network info cache for port 31c4426e-2cae-45ff-be26-c79cdd0db248 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.011031] env[62974]: DEBUG nova.network.neutron [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.011031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.011031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.011539] env[62974]: INFO nova.compute.manager [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Attaching volume 63d19e7f-b8da-4842-a976-78d65b2d6e22 to /dev/sdb [ 886.082026] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d63905-7371-44b8-8c72-270c2109cff3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.091344] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76d0754-7d1c-4e59-9558-2803ad46f9ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.109919] env[62974]: DEBUG nova.virt.block_device [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating existing volume attachment record: 
ac803fba-080b-4570-8a5a-13f17c9e557f {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 886.149551] env[62974]: DEBUG nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 886.189109] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "refresh_cache-79448002-daa3-4afd-bd1b-36d734642a9e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.345871] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Volume attach. Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 886.347348] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535414', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'name': 'volume-e582231b-0f13-489f-96dd-9dd8e2561572', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e23dbff7-d23e-4909-9b33-67ed15c325e7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'serial': 'e582231b-0f13-489f-96dd-9dd8e2561572'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 886.347348] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb155f3-bc70-4014-99eb-2686dae605aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.369936] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 886.371328] env[62974]: DEBUG nova.network.neutron [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Successfully created port: 39690695-af5c-4491-9d0f-b5ea691ce54f {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.376383] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fd0df463-2381-4c63-9a69-59743d321db7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.379644] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e2bb13-ad7d-4ab2-bd8e-0105dbeac382 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.406124] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] volume-e582231b-0f13-489f-96dd-9dd8e2561572/volume-e582231b-0f13-489f-96dd-9dd8e2561572.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.410343] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ac7010f-7dd1-4372-a94a-274f330d8d9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.423650] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 886.423650] env[62974]: value = "task-2654568" [ 886.423650] env[62974]: _type = "Task" [ 886.423650] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.429803] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 886.429803] env[62974]: value = "task-2654570" [ 886.429803] env[62974]: _type = "Task" [ 886.429803] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.436925] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task} progress is 12%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.448497] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654570, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.453994] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654564, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.460576] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654565, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.550527] env[62974]: DEBUG nova.network.neutron [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.689631] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d98c45-6b73-4b42-9a65-c0d9f6e36f0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.700232] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe782b48-89c9-453e-b9e9-96da51acfb01 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.743939] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d30dab-828f-4dbc-a474-e00b6eac5244 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.749824] env[62974]: DEBUG nova.network.neutron [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.755552] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8276cf-a029-4ffe-905b-615a32a5f18d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.779196] env[62974]: DEBUG nova.compute.provider_tree [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.934670] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task} progress is 93%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.946654] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654570, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.950357] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654564, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.960767] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654565, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.129371] env[62974]: DEBUG oslo_concurrency.lockutils [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.129609] env[62974]: DEBUG oslo_concurrency.lockutils [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.129790] env[62974]: DEBUG nova.compute.manager [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 887.134161] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac9acd7-6f5a-4513-a3d8-6751b9acf8c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.139106] env[62974]: DEBUG nova.compute.manager [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 887.139706] env[62974]: DEBUG nova.objects.instance [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'flavor' on Instance uuid 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.163952] env[62974]: DEBUG nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 887.195159] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 887.195475] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.195566] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 887.195952] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.196468] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 887.196712] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 887.197069] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 887.197198] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 887.197424] env[62974]: DEBUG 
nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 887.197615] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 887.197821] env[62974]: DEBUG nova.virt.hardware [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 887.198935] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b040ef-ec4b-4b86-be90-87cd252c1fc4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.209570] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f36020-e2ac-49b2-a492-089428731e91 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.224841] env[62974]: DEBUG nova.network.neutron [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.255070] env[62974]: DEBUG oslo_concurrency.lockutils [req-3c4e63bb-e411-46cd-8aab-e70b675b4c97 req-591be5db-fdbb-4d2c-9183-c779eafe7b4f service nova] Releasing lock "refresh_cache-79448002-daa3-4afd-bd1b-36d734642a9e" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.255484] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquired lock "refresh_cache-79448002-daa3-4afd-bd1b-36d734642a9e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.255692] env[62974]: DEBUG nova.network.neutron [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.279982] env[62974]: DEBUG nova.scheduler.client.report [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.308476] env[62974]: DEBUG nova.network.neutron [-] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.451700] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.466810] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654570, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.472237] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654564, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.475859] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654565, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.727921] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Releasing lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.728163] env[62974]: DEBUG nova.compute.manager [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Inject network info {{(pid=62974) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 887.728340] env[62974]: DEBUG nova.compute.manager [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] network_info to inject: |[{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 887.733636] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Reconfiguring VM instance to set the machine id {{(pid=62974) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 887.733904] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1262a3db-f5bc-4d35-98f8-e9814bc9729d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.751025] env[62974]: DEBUG oslo_vmware.api [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 887.751025] env[62974]: value = "task-2654571" [ 887.751025] 
env[62974]: _type = "Task" [ 887.751025] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.761658] env[62974]: DEBUG oslo_vmware.api [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654571, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.785342] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.785956] env[62974]: DEBUG nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 887.788969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.232s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.790539] env[62974]: INFO nova.compute.claims [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.802311] env[62974]: DEBUG nova.network.neutron [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.812552] env[62974]: INFO nova.compute.manager [-] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Took 2.01 seconds to deallocate network for instance. [ 887.943897] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.945294] env[62974]: DEBUG nova.network.neutron [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Updating instance_info_cache with network_info: [{"id": "31c4426e-2cae-45ff-be26-c79cdd0db248", "address": "fa:16:3e:37:aa:5a", "network": {"id": "4e60595a-a18f-455f-9286-43cf7c5c7c71", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-210969196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd36dea0d5474cc3a836975e86a1dd07", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31c4426e-2c", "ovs_interfaceid": "31c4426e-2cae-45ff-be26-c79cdd0db248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.962697] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654570, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.975081] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654564, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.477925} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.975501] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654565, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.975630] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917/OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917.vmdk to [datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55/a51f6776-a571-4d03-938a-5a97a88c6d55.vmdk. 
[ 887.975843] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Cleaning up location [datastore2] OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 887.976058] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_09ded464-dd1c-4ab5-8b61-52cd77591917 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 887.976350] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-834cd1d2-f1fe-48fa-8955-9b695b4de6f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.983991] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 887.983991] env[62974]: value = "task-2654572" [ 887.983991] env[62974]: _type = "Task" [ 887.983991] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.993011] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654572, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.006516] env[62974]: DEBUG nova.compute.manager [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Received event network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 888.006680] env[62974]: DEBUG nova.compute.manager [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing instance network info cache due to event network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 888.006979] env[62974]: DEBUG oslo_concurrency.lockutils [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] Acquiring lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.007264] env[62974]: DEBUG oslo_concurrency.lockutils [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] Acquired lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.007467] env[62974]: DEBUG nova.network.neutron [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.151085] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.151085] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b09be99f-536e-4388-94e0-80ab37ea606b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.157517] env[62974]: DEBUG oslo_vmware.api [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 888.157517] env[62974]: value = "task-2654573" [ 888.157517] env[62974]: _type = "Task" [ 888.157517] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.166626] env[62974]: DEBUG oslo_vmware.api [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654573, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.192899] env[62974]: DEBUG nova.network.neutron [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Successfully updated port: 39690695-af5c-4491-9d0f-b5ea691ce54f {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.261759] env[62974]: DEBUG oslo_vmware.api [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654571, 'name': ReconfigVM_Task, 'duration_secs': 0.188559} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.262720] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c91d40-81fe-4f5b-ab64-390aaea9ffca tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Reconfigured VM instance to set the machine id {{(pid=62974) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 888.296127] env[62974]: DEBUG nova.compute.utils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 888.300512] env[62974]: DEBUG nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 888.302293] env[62974]: DEBUG nova.network.neutron [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.320725] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.342174] env[62974]: DEBUG nova.policy [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1113302add14eddae0e9236fd9cf1bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2048cc1cb7774806acd48b3b1a2615f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 888.438385] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.452646] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Releasing lock "refresh_cache-79448002-daa3-4afd-bd1b-36d734642a9e" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.452994] env[62974]: DEBUG nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Instance network_info: |[{"id": "31c4426e-2cae-45ff-be26-c79cdd0db248", "address": "fa:16:3e:37:aa:5a", "network": {"id": "4e60595a-a18f-455f-9286-43cf7c5c7c71", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-210969196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd36dea0d5474cc3a836975e86a1dd07", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31c4426e-2c", "ovs_interfaceid": "31c4426e-2cae-45ff-be26-c79cdd0db248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 888.453410] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654570, 'name': ReconfigVM_Task, 'duration_secs': 1.723889} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.453717] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:aa:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31c4426e-2cae-45ff-be26-c79cdd0db248', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.461172] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Creating folder: Project (dd36dea0d5474cc3a836975e86a1dd07). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.464267] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfigured VM instance instance-00000046 to attach disk [datastore2] volume-e582231b-0f13-489f-96dd-9dd8e2561572/volume-e582231b-0f13-489f-96dd-9dd8e2561572.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.468788] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0f6afa1-eb31-4d66-973d-3ba3993b8fc9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.470386] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e408309-1de2-4ce8-b752-803b6331eb65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.486627] env[62974]: DEBUG oslo_vmware.api [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654565, 'name': PowerOnVM_Task, 'duration_secs': 2.104617} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.490479] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.490682] env[62974]: INFO nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Took 15.90 seconds to spawn the instance on the hypervisor. [ 888.490859] env[62974]: DEBUG nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.491207] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 888.491207] env[62974]: value = "task-2654576" [ 888.491207] env[62974]: _type = "Task" [ 888.491207] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.492768] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1920590b-fe9b-4afd-ad79-79168cea49df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.495165] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Created folder: Project (dd36dea0d5474cc3a836975e86a1dd07) in parent group-v535199. 
[ 888.495456] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Creating folder: Instances. Parent ref: group-v535419. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.498622] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e015d0b0-4b60-4680-8bc4-a7d3e4ef8a04 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.502991] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065372} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.503741] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 888.503923] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55/a51f6776-a571-4d03-938a-5a97a88c6d55.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.504212] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55/a51f6776-a571-4d03-938a-5a97a88c6d55.vmdk to [datastore2] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 888.505731] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4df62fb-7319-4659-8128-e8c95dcd2cc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.517361] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654576, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.517670] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Created folder: Instances in parent group-v535419. [ 888.517880] env[62974]: DEBUG oslo.service.loopingcall [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.518471] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.519892] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47c5c661-18ab-47a6-b9e6-e9faed179f05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.536616] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 888.536616] env[62974]: value = "task-2654578" [ 888.536616] env[62974]: _type = "Task" [ 888.536616] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.544752] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.544752] env[62974]: value = "task-2654579" [ 888.544752] env[62974]: _type = "Task" [ 888.544752] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.554039] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.559274] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654579, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.642237] env[62974]: DEBUG nova.network.neutron [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Successfully created port: 2236b408-d781-4e4d-96dd-2759f3457b38 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.668259] env[62974]: DEBUG oslo_vmware.api [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654573, 'name': PowerOffVM_Task, 'duration_secs': 0.191231} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.668589] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.668818] env[62974]: DEBUG nova.compute.manager [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.669718] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa89d3f-6f0a-42bd-a3bd-2978a5afbfa5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.696302] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.696302] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.696302] env[62974]: DEBUG nova.network.neutron [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.737573] env[62974]: DEBUG nova.objects.instance [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lazy-loading 'flavor' on Instance uuid 12c769fb-8c9e-4089-9563-232cfad89b21 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.800913] env[62974]: DEBUG nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 888.899966] env[62974]: DEBUG nova.network.neutron [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updated VIF entry in instance network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.900409] env[62974]: DEBUG nova.network.neutron [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.941709] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.008313] env[62974]: DEBUG oslo_vmware.api [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654576, 'name': ReconfigVM_Task, 'duration_secs': 0.204135} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.012365] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535414', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'name': 'volume-e582231b-0f13-489f-96dd-9dd8e2561572', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e23dbff7-d23e-4909-9b33-67ed15c325e7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'serial': 'e582231b-0f13-489f-96dd-9dd8e2561572'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 889.026963] env[62974]: INFO nova.compute.manager [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Took 41.56 seconds to build instance. [ 889.051436] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654578, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.060652] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654579, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.112357] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Acquiring lock "a14e7e40-afef-4607-8fa9-935a92ea49dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.112610] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.112912] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Acquiring lock "a14e7e40-afef-4607-8fa9-935a92ea49dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.113118] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.113366] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.116067] env[62974]: INFO nova.compute.manager [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Terminating instance [ 889.181959] env[62974]: DEBUG oslo_concurrency.lockutils [None req-00faa63a-24fc-42e7-bb40-ab2af6807ee3 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.052s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.221932] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050c136d-ed75-44ce-9dd5-ddd1d1c5b490 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.228211] env[62974]: DEBUG nova.network.neutron [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.233493] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090ae07b-dc00-48e7-82a7-a6d22ec9b964 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.268350] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.270179] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7623630-dc3d-4dda-99d2-54aa103c20c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.278921] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2659e312-6bc1-4374-918c-f24e1e27c1ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.295966] env[62974]: DEBUG nova.compute.provider_tree [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.403483] env[62974]: DEBUG oslo_concurrency.lockutils [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] Releasing lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.403740] env[62974]: DEBUG nova.compute.manager [req-63d33c73-4438-437f-9bea-f6bf7e9175b8 req-49301624-c4d9-4ffa-b9e5-1d5e0db9331b service nova] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Received event network-vif-deleted-30d94be9-1598-40ca-95b1-cfe821557367 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 889.404216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquired lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.442370] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.531716] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e90261f8-178f-4eed-bf20-3914470c6322 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.072s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.569209] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654578, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.580452] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654579, 'name': CreateVM_Task, 'duration_secs': 0.954645} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.583712] env[62974]: DEBUG nova.network.neutron [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance_info_cache with network_info: [{"id": "39690695-af5c-4491-9d0f-b5ea691ce54f", "address": "fa:16:3e:1c:c5:83", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39690695-af", "ovs_interfaceid": "39690695-af5c-4491-9d0f-b5ea691ce54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.594823] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.595188] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.595858] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 
tempest-AttachInterfacesV270Test-1970420271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.595858] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 889.596103] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc434472-d3c4-4d1f-abd5-92b3946c66cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.606134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.606134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.606134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.606134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.606407] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.612443] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 889.612443] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525372f2-23ec-8775-37cc-69385713d940" [ 889.612443] env[62974]: 
_type = "Task" [ 889.612443] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.612443] env[62974]: INFO nova.compute.manager [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Terminating instance [ 889.623617] env[62974]: DEBUG nova.compute.manager [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 889.623827] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 889.627273] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f8bdd0b-5634-47b7-bded-a1cc381fbb87 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.629492] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525372f2-23ec-8775-37cc-69385713d940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.637676] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 889.637676] env[62974]: value = "task-2654580" [ 889.637676] env[62974]: _type = "Task" [ 889.637676] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.649713] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654580, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.800088] env[62974]: DEBUG nova.scheduler.client.report [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.810265] env[62974]: DEBUG nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 889.821667] env[62974]: DEBUG nova.network.neutron [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.840728] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 889.840986] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.841205] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 889.841437] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Flavor pref 0:0:0 {{(pid=62974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.841969] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 889.841969] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 889.841969] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 889.842199] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 889.842329] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 889.842897] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 889.842897] env[62974]: DEBUG nova.virt.hardware [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 889.843921] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f01b18-59ca-471c-a6b0-bfd6ef44b7fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.853546] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6a3aeb-28c3-4bcb-a4a4-a068b59c4620 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.942471] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654568, 'name': CloneVM_Task, 'duration_secs': 3.415267} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.942854] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Created linked-clone VM from snapshot [ 889.943772] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85377a34-ecfa-42a7-b48f-5d61d0c1567f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.955558] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Uploading image 5d681a90-3310-451a-8a9f-42285b699971 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 889.985469] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 889.985469] env[62974]: value = "vm-535417" [ 889.985469] env[62974]: _type = "VirtualMachine" [ 889.985469] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 889.985821] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9803a64e-1b4f-4b9d-b4aa-dc8be34cb7ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.996116] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease: (returnval){ [ 889.996116] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f58cfa-941b-feda-ca4c-6162b55808bb" [ 889.996116] env[62974]: _type = "HttpNfcLease" [ 889.996116] env[62974]: } obtained for exporting VM: (result){ [ 889.996116] env[62974]: value = "vm-535417" [ 889.996116] env[62974]: _type = "VirtualMachine" [ 889.996116] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 889.996652] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the lease: (returnval){ [ 889.996652] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f58cfa-941b-feda-ca4c-6162b55808bb" [ 889.996652] env[62974]: _type = "HttpNfcLease" [ 889.996652] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 890.005909] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 890.005909] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f58cfa-941b-feda-ca4c-6162b55808bb" [ 890.005909] env[62974]: _type = "HttpNfcLease" [ 890.005909] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 890.051636] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654578, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.092108] env[62974]: DEBUG nova.objects.instance [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lazy-loading 'flavor' on Instance uuid e23dbff7-d23e-4909-9b33-67ed15c325e7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 890.096742] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.096742] env[62974]: DEBUG nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Instance network_info: |[{"id": "39690695-af5c-4491-9d0f-b5ea691ce54f", "address": "fa:16:3e:1c:c5:83", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39690695-af", "ovs_interfaceid": "39690695-af5c-4491-9d0f-b5ea691ce54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.097062] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:c5:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39690695-af5c-4491-9d0f-b5ea691ce54f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.103384] env[62974]: DEBUG oslo.service.loopingcall [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 
tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.105987] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.105987] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da3b1436-f32a-4310-ab25-f8bbf4df33e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.123488] env[62974]: DEBUG nova.compute.manager [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 890.123488] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 890.126850] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7646ed2a-3923-40cd-b474-96ed61f87f49 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.138674] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525372f2-23ec-8775-37cc-69385713d940, 'name': SearchDatastore_Task, 'duration_secs': 0.090681} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.144928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.145219] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.145457] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.145951] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.145951] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.146150] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.146501] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.146501] env[62974]: value = "task-2654582" [ 890.146501] env[62974]: _type = "Task" [ 890.146501] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.146501] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a15124a-4c0f-467c-a92e-d0afdca7360d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.149083] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60f5ad19-4fdc-4c2b-a03f-3c65b408d7bc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.158285] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654580, 'name': PowerOffVM_Task, 'duration_secs': 0.400457} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.158523] env[62974]: DEBUG nova.network.neutron [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Successfully updated port: 2236b408-d781-4e4d-96dd-2759f3457b38 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.160835] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.161084] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 890.161297] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535277', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'name': 'volume-580d4492-2e68-4792-86d6-404ee3e08942', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a14e7e40-afef-4607-8fa9-935a92ea49dc', 'attached_at': '', 'detached_at': '', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'serial': '580d4492-2e68-4792-86d6-404ee3e08942'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 890.162432] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 890.162432] env[62974]: value = "task-2654583" [ 890.162432] env[62974]: _type = "Task" [ 890.162432] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.166683] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d12e23-421a-4505-92f8-eb43a4f248b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.169564] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654582, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.171293] env[62974]: DEBUG nova.compute.manager [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Received event network-vif-plugged-39690695-af5c-4491-9d0f-b5ea691ce54f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 890.171483] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Acquiring lock "e11408df-466c-4101-b0cc-3621cda78a45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.171713] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Lock "e11408df-466c-4101-b0cc-3621cda78a45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.171895] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Lock "e11408df-466c-4101-b0cc-3621cda78a45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.172494] env[62974]: DEBUG nova.compute.manager [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] No waiting events found dispatching network-vif-plugged-39690695-af5c-4491-9d0f-b5ea691ce54f {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 890.172494] env[62974]: WARNING nova.compute.manager [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Received unexpected event network-vif-plugged-39690695-af5c-4491-9d0f-b5ea691ce54f for instance with vm_state building and task_state spawning. [ 890.172494] env[62974]: DEBUG nova.compute.manager [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Received event network-changed-39690695-af5c-4491-9d0f-b5ea691ce54f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 890.172632] env[62974]: DEBUG nova.compute.manager [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Refreshing instance network info cache due to event network-changed-39690695-af5c-4491-9d0f-b5ea691ce54f. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 890.172761] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Acquiring lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.172899] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Acquired lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.173244] env[62974]: DEBUG nova.network.neutron [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Refreshing network info cache for port 39690695-af5c-4491-9d0f-b5ea691ce54f {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.180085] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.180920] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.182209] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98637557-56c6-4043-86ce-6942abe0aa74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.202650] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4418c5e2-2107-41fd-a5e1-055ba62c0a97 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.211243] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.211821] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 890.211821] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52122810-04a1-abc7-2952-6404c468407e" [ 890.211821] env[62974]: _type = "Task" [ 890.211821] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.218720] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f6a3d2-3084-4d6f-9eba-5b013cba8ecc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.224939] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52122810-04a1-abc7-2952-6404c468407e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.242852] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed557a9-3ca9-4472-ac74-74fad5788313 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.263194] env[62974]: DEBUG nova.objects.instance [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'flavor' on Instance uuid 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 890.264631] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] The volume has not been displaced from its original location: [datastore2] volume-580d4492-2e68-4792-86d6-404ee3e08942/volume-580d4492-2e68-4792-86d6-404ee3e08942.vmdk. No consolidation needed. {{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 890.270040] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 890.270688] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad1bd699-2a8e-475b-884b-87c9917d88e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.290647] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 890.290647] env[62974]: value = "task-2654584" [ 890.290647] env[62974]: _type = "Task" [ 890.290647] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.300620] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654584, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.306070] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.306727] env[62974]: DEBUG nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 890.309622] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.401s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.309622] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.311767] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.263s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.312013] env[62974]: DEBUG nova.objects.instance [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lazy-loading 'resources' on Instance uuid 85f8f79d-330a-49cd-b1ae-8de20c70fcab {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 890.346446] env[62974]: INFO nova.scheduler.client.report [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted allocations for instance af370de1-e4d7-4312-bc72-c6398eeaf2ed [ 890.507042] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 890.507042] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f58cfa-941b-feda-ca4c-6162b55808bb" [ 890.507042] env[62974]: _type = "HttpNfcLease" [ 890.507042] env[62974]: } is ready. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 890.507524] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 890.507524] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f58cfa-941b-feda-ca4c-6162b55808bb" [ 890.507524] env[62974]: _type = "HttpNfcLease" [ 890.507524] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 890.508743] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8131acf1-ebfa-4090-946d-639ab5aa0246 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.517972] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5284e29a-4e38-2568-f47e-8f41883db2f2/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 890.518800] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5284e29a-4e38-2568-f47e-8f41883db2f2/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 890.596452] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654578, 'name': CopyVirtualDisk_Task} progress is 74%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.597024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-bfac6784-3cf0-4f96-af6a-e06097f9b9e4 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 11.393s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.623053] env[62974]: DEBUG nova.network.neutron [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.661287] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654582, 'name': CreateVM_Task, 'duration_secs': 0.405148} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.661438] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 890.663032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.663032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.663032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 890.663284] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "refresh_cache-c08ed924-9b7d-4773-8e49-c57ecfb27d03" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.663391] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquired lock "refresh_cache-c08ed924-9b7d-4773-8e49-c57ecfb27d03" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.663529] env[62974]: DEBUG nova.network.neutron [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.664988] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37c3c15d-c310-457c-8536-2a51cefb57aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.671192] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 890.671466] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535418', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'name': 'volume-63d19e7f-b8da-4842-a976-78d65b2d6e22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c1d0b90c-aa1c-485d-850d-a1495feac7c9', 'attached_at': '', 'detached_at': '', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'serial': '63d19e7f-b8da-4842-a976-78d65b2d6e22'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 890.671965] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 890.671965] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52760e31-0dfc-6a42-6e4e-14957f85f9d2" [ 890.671965] env[62974]: _type = "Task" [ 890.671965] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.673633] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28c780c-a35f-4657-b711-bcc229708501 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.707634] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8298afc9-8f84-41f8-8b7c-ed2294f6f888 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.712962] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52760e31-0dfc-6a42-6e4e-14957f85f9d2, 'name': SearchDatastore_Task, 'duration_secs': 0.012234} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.713301] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654583, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.714173] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.714412] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.714718] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.714784] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.716461] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.720457] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b67e5e8-a4dd-446a-9ac7-571dbc1bc60a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.745056] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] volume-63d19e7f-b8da-4842-a976-78d65b2d6e22/volume-63d19e7f-b8da-4842-a976-78d65b2d6e22.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.746408] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-666fd219-75db-4723-ba6f-45493ffef5aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.764642] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52122810-04a1-abc7-2952-6404c468407e, 'name': SearchDatastore_Task, 'duration_secs': 
0.095289} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.764906] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.765079] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.766763] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a8c98e8-839d-433f-a595-8d67e7b6efda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.769279] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b8e31a3-e621-42dd-af4c-4a588035dcea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.775309] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.775509] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.775657] env[62974]: DEBUG nova.network.neutron [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.775827] env[62974]: DEBUG nova.objects.instance [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'info_cache' on Instance uuid 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 890.778509] env[62974]: DEBUG oslo_vmware.api [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 890.778509] env[62974]: value = "task-2654585" [ 890.778509] env[62974]: _type = "Task" [ 890.778509] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.785760] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 890.785760] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2faee-67c6-87bb-6d77-f572643eebea" [ 890.785760] env[62974]: _type = "Task" [ 890.785760] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.785904] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 890.785904] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ab7629-14bf-96b4-5404-ae24473ba5d2" [ 890.785904] env[62974]: _type = "Task" [ 890.785904] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.800055] env[62974]: DEBUG oslo_vmware.api [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654585, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.810211] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2faee-67c6-87bb-6d77-f572643eebea, 'name': SearchDatastore_Task, 'duration_secs': 0.01227} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.814940] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ab7629-14bf-96b4-5404-ae24473ba5d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.819371] env[62974]: DEBUG nova.compute.utils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.823799] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fc24696-8915-49a4-92d1-d5b24d34d728 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.826704] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654584, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.829529] env[62974]: DEBUG nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 890.829873] env[62974]: DEBUG nova.network.neutron [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 890.837587] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 890.837587] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529887ac-7bb1-f840-3415-e203d6f50a63" [ 890.837587] env[62974]: _type = "Task" [ 890.837587] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.847020] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529887ac-7bb1-f840-3415-e203d6f50a63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.864404] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2113c989-a958-4387-8107-cb35bcbaa7a1 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "af370de1-e4d7-4312-bc72-c6398eeaf2ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.486s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.899204] env[62974]: INFO nova.compute.manager [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Rescuing [ 890.899476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.899651] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.899801] env[62974]: DEBUG nova.network.neutron [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] 
[instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.911979] env[62974]: DEBUG nova.policy [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '861e5f8e6cd94fc7aeff2c22bd65df53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9de9a9e0393b4445a0ce8b5cd0df272f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 890.976062] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d1a28f37-c920-4f97-9b04-f8d538651062 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.058435] env[62974]: DEBUG nova.network.neutron [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updated VIF entry in instance network info cache for port 39690695-af5c-4491-9d0f-b5ea691ce54f. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.058866] env[62974]: DEBUG nova.network.neutron [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance_info_cache with network_info: [{"id": "39690695-af5c-4491-9d0f-b5ea691ce54f", "address": "fa:16:3e:1c:c5:83", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39690695-af", "ovs_interfaceid": "39690695-af5c-4491-9d0f-b5ea691ce54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.093790] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654578, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.126390] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Releasing lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.126682] env[62974]: DEBUG nova.compute.manager [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Inject network info {{(pid=62974) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 891.127227] env[62974]: DEBUG nova.compute.manager [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] network_info to inject: |[{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 891.132403] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Reconfiguring VM instance to set the machine id {{(pid=62974) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 891.136011] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e196d326-829c-4074-92d1-8f2d48c621a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.152897] env[62974]: DEBUG oslo_vmware.api [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 891.152897] env[62974]: value = "task-2654586" [ 891.152897] env[62974]: _type = "Task" [ 891.152897] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.163645] env[62974]: DEBUG oslo_vmware.api [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654586, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.185967] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654583, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.210703] env[62974]: DEBUG nova.network.neutron [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.279948] env[62974]: DEBUG nova.objects.base [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Object Instance<514e0f15-f27d-4fab-9107-b92884075420> lazy-loaded attributes: flavor,info_cache {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 891.287967] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14caf11-7384-4944-9a95-1c2aa7fa3ba4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.298228] env[62974]: DEBUG oslo_vmware.api [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654585, 'name': ReconfigVM_Task, 'duration_secs': 0.440465} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.299019] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Reconfigured VM instance instance-00000048 to attach disk [datastore1] volume-63d19e7f-b8da-4842-a976-78d65b2d6e22/volume-63d19e7f-b8da-4842-a976-78d65b2d6e22.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.306644] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cc88b25-a902-44c9-9f5a-946d2014c757 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.326681] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d56a435-0d7a-4580-9e0e-3024ede977a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.329662] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ab7629-14bf-96b4-5404-ae24473ba5d2, 'name': SearchDatastore_Task, 'duration_secs': 0.088428} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.330472] env[62974]: DEBUG nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 891.333867] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.334255] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 79448002-daa3-4afd-bd1b-36d734642a9e/79448002-daa3-4afd-bd1b-36d734642a9e.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 891.336634] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0dda27ee-2167-4c34-a4b7-a21c2ff06fde {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.367213] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654584, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.368495] env[62974]: DEBUG oslo_vmware.api [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 891.368495] env[62974]: value = "task-2654587" [ 891.368495] env[62974]: _type = "Task" [ 891.368495] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.369470] env[62974]: DEBUG nova.network.neutron [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Successfully created port: b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.377663] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1356be94-1491-47b1-aade-8a09fc7513b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.382820] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 891.382820] env[62974]: value = "task-2654588" [ 891.382820] env[62974]: _type = "Task" [ 891.382820] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.404017] env[62974]: DEBUG oslo_vmware.api [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654587, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.404017] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529887ac-7bb1-f840-3415-e203d6f50a63, 'name': SearchDatastore_Task, 'duration_secs': 0.017907} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.404017] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6d31fa-2701-4746-99f0-4308235035ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.406454] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.406820] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e11408df-466c-4101-b0cc-3621cda78a45/e11408df-466c-4101-b0cc-3621cda78a45.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 891.412781] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db11efe2-7c7c-42c8-8a39-218ee47683de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.415622] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.427682] env[62974]: DEBUG nova.compute.provider_tree [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.431220] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 891.431220] env[62974]: value = "task-2654589" [ 891.431220] env[62974]: _type = "Task" [ 891.431220] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.440413] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654589, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.455483] env[62974]: DEBUG nova.network.neutron [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Updating instance_info_cache with network_info: [{"id": "2236b408-d781-4e4d-96dd-2759f3457b38", "address": "fa:16:3e:60:96:e1", "network": {"id": "68e2d4be-3f08-4700-b7ac-d4592f220d8b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1408911914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2048cc1cb7774806acd48b3b1a2615f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2236b408-d7", "ovs_interfaceid": "2236b408-d781-4e4d-96dd-2759f3457b38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.466620] env[62974]: DEBUG oslo_concurrency.lockutils [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.466989] env[62974]: DEBUG oslo_concurrency.lockutils [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.563521] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Releasing lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.563778] env[62974]: DEBUG nova.compute.manager [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Received event network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 891.563962] env[62974]: DEBUG nova.compute.manager [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing instance network info cache due to event 
network-changed-eaf8ac28-c7f5-4462-9003-c34a22eb0f00. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 891.564196] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Acquiring lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.564350] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Acquired lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.564623] env[62974]: DEBUG nova.network.neutron [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Refreshing network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.595450] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654578, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.705578} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.595831] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a51f6776-a571-4d03-938a-5a97a88c6d55/a51f6776-a571-4d03-938a-5a97a88c6d55.vmdk to [datastore2] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.597825] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da63b85-70a9-4969-a319-69ff44a89ba0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.628284] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.628799] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c27d99c3-f63c-4762-a714-e0b80d64929d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.652097] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 891.652097] env[62974]: value = "task-2654590" [ 891.652097] env[62974]: _type = "Task" [ 891.652097] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.665088] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654590, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.668437] env[62974]: DEBUG oslo_vmware.api [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654586, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.684989] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654583, 'name': PowerOffVM_Task, 'duration_secs': 1.091839} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.685396] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.685503] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.685785] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e8a2542-18e1-4d97-a773-208d5e609f3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.724704] env[62974]: DEBUG nova.network.neutron [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f4c134a-f0", "ovs_interfaceid": 
"1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.774544] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.774544] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.774544] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleting the datastore file [datastore2] 70adaccf-44ab-44b1-ac8a-005d42c09f0a {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.774544] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c67736e-d27c-4837-abd7-9e91bb0cdabe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.781878] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 891.781878] env[62974]: value = "task-2654592" [ 891.781878] env[62974]: _type = "Task" [ 891.781878] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.792581] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.816310] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654584, 'name': ReconfigVM_Task, 'duration_secs': 1.425475} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.816661] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 891.821983] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9edc870b-c400-4a79-941c-937dfa247eb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.845776] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 891.845776] env[62974]: value = "task-2654593" [ 891.845776] env[62974]: _type = "Task" [ 891.845776] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.853932] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654593, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.888429] env[62974]: DEBUG oslo_vmware.api [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654587, 'name': ReconfigVM_Task, 'duration_secs': 0.151434} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.892246] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535418', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'name': 'volume-63d19e7f-b8da-4842-a976-78d65b2d6e22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c1d0b90c-aa1c-485d-850d-a1495feac7c9', 'attached_at': '', 'detached_at': '', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'serial': '63d19e7f-b8da-4842-a976-78d65b2d6e22'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 891.899344] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654588, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.931325] env[62974]: DEBUG nova.scheduler.client.report [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.944540] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654589, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.958702] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Releasing lock "refresh_cache-c08ed924-9b7d-4773-8e49-c57ecfb27d03" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.959279] env[62974]: DEBUG nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Instance network_info: |[{"id": "2236b408-d781-4e4d-96dd-2759f3457b38", "address": "fa:16:3e:60:96:e1", "network": {"id": "68e2d4be-3f08-4700-b7ac-d4592f220d8b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1408911914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2048cc1cb7774806acd48b3b1a2615f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2236b408-d7", "ovs_interfaceid": "2236b408-d781-4e4d-96dd-2759f3457b38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 891.959892] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:96:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'0721b358-3768-472d-95f8-6d6755ab1635', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2236b408-d781-4e4d-96dd-2759f3457b38', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.971736] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Creating folder: Project (2048cc1cb7774806acd48b3b1a2615f5). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.974808] env[62974]: DEBUG nova.compute.utils [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 891.981026] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf4936c3-e989-4b32-b5a9-5d16a1cf3432 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.992463] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Created folder: Project (2048cc1cb7774806acd48b3b1a2615f5) in parent group-v535199. [ 891.992693] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Creating folder: Instances. Parent ref: group-v535423. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.994454] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-032e33ce-fe4d-44af-8e97-37ebd6cc5eb0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.006588] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Created folder: Instances in parent group-v535423. [ 892.007334] env[62974]: DEBUG oslo.service.loopingcall [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.007704] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.007939] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-654ddac6-43fc-473f-b0c9-14adf60c15fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.033421] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.033421] env[62974]: value = "task-2654596" [ 892.033421] env[62974]: _type = "Task" [ 892.033421] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.053788] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654596, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.173270] env[62974]: DEBUG oslo_vmware.api [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654586, 'name': ReconfigVM_Task, 'duration_secs': 0.634592} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.181268] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84d101-98a9-48fe-a0ee-b305e44c94fa tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Reconfigured VM instance to set the machine id {{(pid=62974) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 892.184460] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654590, 'name': ReconfigVM_Task, 'duration_secs': 0.409336} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.184810] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 366b5816-a847-48d1-ad03-5758e473a9d0/366b5816-a847-48d1-ad03-5758e473a9d0.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.186673] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4724c77d-dd1d-4179-a6e1-9cf1be3879f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.196309] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 892.196309] env[62974]: value = "task-2654597" [ 892.196309] env[62974]: _type = "Task" [ 892.196309] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.212044] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654597, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.227869] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.232375] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "12c769fb-8c9e-4089-9563-232cfad89b21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.232763] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "12c769fb-8c9e-4089-9563-232cfad89b21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.233221] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "12c769fb-8c9e-4089-9563-232cfad89b21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.233564] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "12c769fb-8c9e-4089-9563-232cfad89b21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.233869] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "12c769fb-8c9e-4089-9563-232cfad89b21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.239317] env[62974]: INFO nova.compute.manager [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Terminating instance [ 892.296836] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654592, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.310977] env[62974]: DEBUG nova.network.neutron [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.342931] env[62974]: DEBUG nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 892.355937] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654593, 'name': ReconfigVM_Task, 'duration_secs': 0.158519} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.358828] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535277', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'name': 'volume-580d4492-2e68-4792-86d6-404ee3e08942', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a14e7e40-afef-4607-8fa9-935a92ea49dc', 'attached_at': '', 'detached_at': '', 'volume_id': '580d4492-2e68-4792-86d6-404ee3e08942', 'serial': '580d4492-2e68-4792-86d6-404ee3e08942'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 892.359319] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.363031] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa23ac1c-966a-48d6-aef1-a0bf53acf825 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.369893] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.372352] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 892.372593] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.372838] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 
tempest-VolumesAdminNegativeTest-2067265764-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 892.373119] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.373296] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 892.373448] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 892.373654] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 892.373813] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 892.374081] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 892.374217] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 892.374384] env[62974]: DEBUG nova.virt.hardware [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 892.374665] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31d9708e-f797-498d-b5fb-80cd028cb666 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.376904] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70bd27f-d923-44ce-b651-b309c7a38747 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.385443] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86c3d9b-74b2-48a1-8dfd-914ab47dfb08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.414198] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654588, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.946225} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.414821] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 79448002-daa3-4afd-bd1b-36d734642a9e/79448002-daa3-4afd-bd1b-36d734642a9e.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 892.415083] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 892.415352] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4e0e7f1-872e-4e10-9036-fc093b5ecf02 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.422814] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 892.422814] env[62974]: value = "task-2654599" [ 892.422814] env[62974]: _type = "Task" [ 892.422814] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.432792] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654599, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.439527] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.127s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.443235] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.269s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.443235] env[62974]: DEBUG nova.objects.instance [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lazy-loading 'resources' on Instance uuid 6928b412-e8cb-42fb-bc47-dc8498f12ad1 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 892.456336] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654589, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.91991} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.458813] env[62974]: DEBUG nova.network.neutron [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updated VIF entry in instance network info cache for port eaf8ac28-c7f5-4462-9003-c34a22eb0f00. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.459436] env[62974]: DEBUG nova.network.neutron [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [{"id": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "address": "fa:16:3e:9c:ab:01", "network": {"id": "1b1342e3-73ef-4c89-b3d8-60b216cfe7ba", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1055357862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17bfed0a840e43b18856a7a33ec4bafc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8ac28-c7", "ovs_interfaceid": "eaf8ac28-c7f5-4462-9003-c34a22eb0f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.461292] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e11408df-466c-4101-b0cc-3621cda78a45/e11408df-466c-4101-b0cc-3621cda78a45.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 892.461489] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 892.463178] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 892.463567] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 892.464384] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 
tempest-ServersTestBootFromVolume-77464477-project-member] Deleting the datastore file [datastore2] a14e7e40-afef-4607-8fa9-935a92ea49dc {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.464384] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e09744c3-cce7-48f7-9da5-85be61278c71 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.467759] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-474b963b-86cb-4229-8042-43cc55769023 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.471944] env[62974]: INFO nova.scheduler.client.report [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted allocations for instance 85f8f79d-330a-49cd-b1ae-8de20c70fcab [ 892.480147] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for the task: (returnval){ [ 892.480147] env[62974]: value = "task-2654601" [ 892.480147] env[62974]: _type = "Task" [ 892.480147] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.483055] env[62974]: DEBUG oslo_concurrency.lockutils [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.483975] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 892.483975] env[62974]: value = "task-2654600" [ 892.483975] env[62974]: _type = "Task" [ 892.483975] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.499768] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654601, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.503585] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654600, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.543722] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654596, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.715744] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654597, 'name': Rename_Task, 'duration_secs': 0.18307} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.716266] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.716627] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64de3e92-bb31-4d7c-acc9-578b4257862e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.724403] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 892.724403] env[62974]: value = "task-2654602" [ 892.724403] env[62974]: _type = "Task" [ 892.724403] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.732451] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654602, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.746055] env[62974]: DEBUG nova.compute.manager [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 892.746293] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.747708] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d827a2db-6af1-4729-90ad-7797d5cd5902 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.755335] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.755767] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cd89cdd-74ea-4474-92f6-842d0b58dcff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.762407] env[62974]: DEBUG oslo_vmware.api [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 892.762407] env[62974]: value = "task-2654603" [ 892.762407] env[62974]: _type = "Task" [ 892.762407] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.772656] env[62974]: DEBUG oslo_vmware.api [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.797992] env[62974]: DEBUG oslo_vmware.api [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.718425} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.798482] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.798691] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.799291] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.799544] env[62974]: INFO nova.compute.manager [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Took 2.68 seconds to destroy the instance on the hypervisor. [ 892.800160] env[62974]: DEBUG oslo.service.loopingcall [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.800386] env[62974]: DEBUG nova.compute.manager [-] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 892.800483] env[62974]: DEBUG nova.network.neutron [-] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.813985] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.934694] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079064} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.935074] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.936012] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23219cb7-810c-48eb-b2a8-68758bcb407d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.964850] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 79448002-daa3-4afd-bd1b-36d734642a9e/79448002-daa3-4afd-bd1b-36d734642a9e.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.966580] env[62974]: DEBUG nova.objects.instance [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 892.968492] env[62974]: DEBUG oslo_concurrency.lockutils [req-17a11f29-4538-49a3-9ad9-9175bef72331 req-71e1fc8c-6cde-48b1-97d3-a30cd070ac52 service nova] Releasing lock "refresh_cache-12c769fb-8c9e-4089-9563-232cfad89b21" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.968995] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-955a5e31-0e98-488e-9122-6f936fb26ecb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.993251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0dff4b20-97f7-4433-aaeb-6ccfaf018531 tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "85f8f79d-330a-49cd-b1ae-8de20c70fcab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.545s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.996636] env[62974]: DEBUG nova.compute.manager [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Received event network-vif-plugged-2236b408-d781-4e4d-96dd-2759f3457b38 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 892.996875] env[62974]: DEBUG oslo_concurrency.lockutils [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] Acquiring lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.997496] env[62974]: DEBUG oslo_concurrency.lockutils [req-55532a06-64ac-4b5e-9be1-44644978f547 
req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.997496] env[62974]: DEBUG oslo_concurrency.lockutils [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.997496] env[62974]: DEBUG nova.compute.manager [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] No waiting events found dispatching network-vif-plugged-2236b408-d781-4e4d-96dd-2759f3457b38 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.997884] env[62974]: WARNING nova.compute.manager [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Received unexpected event network-vif-plugged-2236b408-d781-4e4d-96dd-2759f3457b38 for instance with vm_state building and task_state spawning. [ 892.997884] env[62974]: DEBUG nova.compute.manager [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Received event network-changed-2236b408-d781-4e4d-96dd-2759f3457b38 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 892.997953] env[62974]: DEBUG nova.compute.manager [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Refreshing instance network info cache due to event network-changed-2236b408-d781-4e4d-96dd-2759f3457b38. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 892.999276] env[62974]: DEBUG oslo_concurrency.lockutils [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] Acquiring lock "refresh_cache-c08ed924-9b7d-4773-8e49-c57ecfb27d03" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.999529] env[62974]: DEBUG oslo_concurrency.lockutils [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] Acquired lock "refresh_cache-c08ed924-9b7d-4773-8e49-c57ecfb27d03" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.999596] env[62974]: DEBUG nova.network.neutron [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Refreshing network info cache for port 2236b408-d781-4e4d-96dd-2759f3457b38 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 893.009658] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 893.009658] env[62974]: value = "task-2654604" [ 893.009658] env[62974]: _type = "Task" [ 893.009658] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.021636] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654600, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065736} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.021960] env[62974]: DEBUG oslo_vmware.api [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Task: {'id': task-2654601, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201359} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.022668] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 893.023167] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.023425] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.024223] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.024223] env[62974]: INFO nova.compute.manager [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Took 3.40 seconds to destroy the instance on the hypervisor. [ 893.024223] env[62974]: DEBUG oslo.service.loopingcall [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.028981] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3621b3-0289-444a-81b2-e66a3cda53df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.032090] env[62974]: DEBUG nova.compute.manager [-] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 893.032241] env[62974]: DEBUG nova.network.neutron [-] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 893.034384] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654604, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.062528] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] e11408df-466c-4101-b0cc-3621cda78a45/e11408df-466c-4101-b0cc-3621cda78a45.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.070481] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01974788-bc74-40c7-bd3d-cd7b4e6472fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.091291] env[62974]: DEBUG nova.network.neutron [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Successfully updated port: b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 893.093116] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654596, 'name': CreateVM_Task, 'duration_secs': 0.728362} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.093917] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.096427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.096427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.096427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.099337] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9a2b241-4969-4e1e-b883-ebf3f4d460e0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.102815] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 893.102815] env[62974]: value = "task-2654605" [ 893.102815] 
env[62974]: _type = "Task" [ 893.102815] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.109614] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 893.109614] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af86ef-0f22-ae2c-dea9-d3ecf9f49fbd" [ 893.109614] env[62974]: _type = "Task" [ 893.109614] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.118156] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654605, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.126879] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52af86ef-0f22-ae2c-dea9-d3ecf9f49fbd, 'name': SearchDatastore_Task, 'duration_secs': 0.015095} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.127895] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.127895] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.128048] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.128228] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.128376] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.128722] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd1fe03f-d36e-401f-a463-c1da93a6c1b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.139350] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.139350] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.140181] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acd5a5bb-7444-4c09-a20a-1b111cbfc272 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.145767] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 893.145767] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b353eb-39aa-2422-7d2f-5b759de987e3" [ 893.145767] env[62974]: _type = "Task" [ 893.145767] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.156220] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b353eb-39aa-2422-7d2f-5b759de987e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.235277] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654602, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.277334] env[62974]: DEBUG oslo_vmware.api [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654603, 'name': PowerOffVM_Task, 'duration_secs': 0.223774} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.282086] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.282086] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.282363] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05fc8d1f-726a-46ff-abca-d38fdcea1189 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.350187] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.350419] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.350622] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Deleting the datastore file [datastore1] 12c769fb-8c9e-4089-9563-232cfad89b21 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.350916] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b25587a5-4d78-42e4-950b-23ef8e4d46dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.358060] env[62974]: DEBUG oslo_vmware.api [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for the task: (returnval){ [ 893.358060] env[62974]: value = "task-2654607" [ 893.358060] env[62974]: _type = "Task" [ 893.358060] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.367201] env[62974]: DEBUG oslo_vmware.api [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654607, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.482147] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46532ff-6ab0-4c2b-8bc6-bd26b5213f84 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.493265] env[62974]: DEBUG oslo_concurrency.lockutils [None req-239e7727-27f5-496a-b0ce-90a4267cfd95 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.482s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.495281] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5271bfb1-84e6-4bbb-9703-b0b7707f034c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.543821] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f4f559-c5f4-4ae7-8dd9-f945cdeddba4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.553808] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654604, 'name': ReconfigVM_Task, 'duration_secs': 0.521248} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.556790] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 79448002-daa3-4afd-bd1b-36d734642a9e/79448002-daa3-4afd-bd1b-36d734642a9e.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.558196] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9f0b911-bc91-480a-8bb0-3d7bc2fa3345 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.562612] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016f5ae0-0629-4d95-9233-2ad0aed0aed6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.584423] env[62974]: DEBUG nova.compute.provider_tree [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.590041] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 893.590041] env[62974]: value = "task-2654608" [ 893.590041] env[62974]: _type = "Task" [ 893.590041] 
env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.594569] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "refresh_cache-eb8647c7-f5e1-4de5-8321-9a9ecff5961c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.594681] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired lock "refresh_cache-eb8647c7-f5e1-4de5-8321-9a9ecff5961c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.594841] env[62974]: DEBUG nova.network.neutron [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.596934] env[62974]: DEBUG oslo_concurrency.lockutils [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.597362] env[62974]: DEBUG oslo_concurrency.lockutils [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.597502] env[62974]: INFO nova.compute.manager [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Attaching volume 13787642-ed9f-449c-b672-b1b3b50942b0 to /dev/sdb [ 893.607422] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654608, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.620564] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654605, 'name': ReconfigVM_Task, 'duration_secs': 0.471147} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.621906] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Reconfigured VM instance instance-0000004f to attach disk [datastore1] e11408df-466c-4101-b0cc-3621cda78a45/e11408df-466c-4101-b0cc-3621cda78a45.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.621906] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f314d64-8735-4802-a569-93da71b5cb6d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.628935] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 893.628935] env[62974]: value = "task-2654609" [ 893.628935] env[62974]: _type = "Task" [ 893.628935] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.640292] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654609, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.652751] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace529f5-6619-4f18-8ad3-5d53a9cb371f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.666167] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b353eb-39aa-2422-7d2f-5b759de987e3, 'name': SearchDatastore_Task, 'duration_secs': 0.014248} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.671973] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-761ff97c-16d2-45c9-bd98-52254373d0c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.676780] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e18fb4-94da-4b91-8fbf-97ef3418a425 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.683029] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 893.683029] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52293dfb-950e-9e29-f328-a9ec93c66a39" [ 893.683029] env[62974]: _type = "Task" [ 893.683029] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.692504] env[62974]: DEBUG nova.network.neutron [-] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.693772] env[62974]: DEBUG nova.virt.block_device [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updating existing volume attachment record: f6b9cdc6-d489-4cd7-8b33-24f1aa3ffced {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 893.704384] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52293dfb-950e-9e29-f328-a9ec93c66a39, 'name': SearchDatastore_Task, 'duration_secs': 0.012498} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.704549] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.704902] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c08ed924-9b7d-4773-8e49-c57ecfb27d03/c08ed924-9b7d-4773-8e49-c57ecfb27d03.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.705281] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52f7d32c-f331-41e7-bbb0-afd92df1a2d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.713815] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 893.713815] env[62974]: value = "task-2654610" [ 893.713815] env[62974]: _type = "Task" [ 893.713815] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.723336] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654610, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.734099] env[62974]: DEBUG oslo_vmware.api [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654602, 'name': PowerOnVM_Task, 'duration_secs': 0.561367} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.736893] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.785384] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.785673] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-968f93b2-c3e3-4707-90b0-8852cd638fee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.794286] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 893.794286] env[62974]: value = "task-2654611" [ 893.794286] env[62974]: _type = "Task" [ 893.794286] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.802889] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.809991] env[62974]: DEBUG nova.network.neutron [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Updated VIF entry in instance network info cache for port 2236b408-d781-4e4d-96dd-2759f3457b38. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.809991] env[62974]: DEBUG nova.network.neutron [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Updating instance_info_cache with network_info: [{"id": "2236b408-d781-4e4d-96dd-2759f3457b38", "address": "fa:16:3e:60:96:e1", "network": {"id": "68e2d4be-3f08-4700-b7ac-d4592f220d8b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1408911914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2048cc1cb7774806acd48b3b1a2615f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0721b358-3768-472d-95f8-6d6755ab1635", "external-id": "nsx-vlan-transportzone-314", "segmentation_id": 314, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2236b408-d7", "ovs_interfaceid": "2236b408-d781-4e4d-96dd-2759f3457b38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.823502] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.824339] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c97d973a-de7e-408a-85e4-5fb4855d7a2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.833269] env[62974]: DEBUG oslo_vmware.api [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 893.833269] env[62974]: value = "task-2654612" [ 893.833269] env[62974]: _type = "Task" [ 893.833269] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.844017] env[62974]: DEBUG oslo_vmware.api [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654612, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.874782] env[62974]: DEBUG oslo_vmware.api [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Task: {'id': task-2654607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253083} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.874782] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.874782] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.874782] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.874782] env[62974]: INFO nova.compute.manager [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Took 1.13 seconds to destroy the instance on the hypervisor. [ 893.874946] env[62974]: DEBUG oslo.service.loopingcall [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.874946] env[62974]: DEBUG nova.compute.manager [-] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 893.874946] env[62974]: DEBUG nova.network.neutron [-] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 893.877532] env[62974]: DEBUG nova.compute.manager [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 893.878440] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d13c61d-b28e-4e97-a085-a2b8c8ca7cf3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.091962] env[62974]: DEBUG nova.scheduler.client.report [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.119865] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654608, 'name': Rename_Task, 'duration_secs': 0.184311} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.119865] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.119946] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ade2fc7-1be6-42f4-9b2b-19c890b3a31d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.131585] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 894.131585] env[62974]: value = "task-2654615" [ 894.131585] env[62974]: _type = "Task" [ 894.131585] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.150269] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654615, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.150619] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654609, 'name': Rename_Task, 'duration_secs': 0.225248} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.157487] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.157487] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c748368-aabf-433a-bd66-149b539e682f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.157487] env[62974]: DEBUG nova.network.neutron [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.159741] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.160032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.160227] env[62974]: DEBUG nova.compute.manager [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.161428] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92cc399-1821-45ef-9693-abe8da561ccb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.167396] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 894.167396] env[62974]: value = "task-2654617" [ 894.167396] env[62974]: _type = "Task" [ 894.167396] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.172882] env[62974]: DEBUG nova.compute.manager [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 894.173349] env[62974]: DEBUG nova.objects.instance [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.186654] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "b3827c67-9075-4a53-9f9e-8651e3f4b211" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.186933] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.187943] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "b3827c67-9075-4a53-9f9e-8651e3f4b211-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.188115] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.188303] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.190517] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654617, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.191035] env[62974]: INFO nova.compute.manager [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Terminating instance [ 894.199578] env[62974]: INFO nova.compute.manager [-] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Took 1.40 seconds to deallocate network for instance. [ 894.229793] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.307639] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.312608] env[62974]: DEBUG oslo_concurrency.lockutils [req-55532a06-64ac-4b5e-9be1-44644978f547 req-8f00d430-5682-49a3-9b1e-57cd2afd5d08 service nova] Releasing lock "refresh_cache-c08ed924-9b7d-4773-8e49-c57ecfb27d03" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.345750] env[62974]: DEBUG oslo_vmware.api [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654612, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.398739] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c05fe82e-2b93-41de-bca3-3e226ab512ae tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 46.299s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.609252] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.609252] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.717s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.612499] env[62974]: INFO nova.compute.claims [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.630650] env[62974]: DEBUG nova.network.neutron [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Updating instance_info_cache with network_info: [{"id": "b3dc1eb5-896e-4ba2-a50d-0626b0e5f490", "address": "fa:16:3e:ee:7c:ac", "network": {"id": "3518cb90-bb1f-4059-9f82-0f81c27ff829", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-912534611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de9a9e0393b4445a0ce8b5cd0df272f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3dc1eb5-89", "ovs_interfaceid": "b3dc1eb5-896e-4ba2-a50d-0626b0e5f490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.643755] env[62974]: INFO nova.scheduler.client.report [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 
tempest-SecurityGroupsTestJSON-995131817-project-member] Deleted allocations for instance 6928b412-e8cb-42fb-bc47-dc8498f12ad1 [ 894.653410] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654615, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.653769] env[62974]: DEBUG nova.network.neutron [-] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.680481] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654617, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.695329] env[62974]: DEBUG nova.compute.manager [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 894.695539] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 894.697145] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a6d34a-a33d-470a-a8df-69ab0b631b59 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.707548] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.708961] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.708961] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb6136dd-03a0-465e-bf42-0ad0d806e45f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.718022] env[62974]: DEBUG oslo_vmware.api [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 894.718022] env[62974]: value = "task-2654618" [ 894.718022] env[62974]: _type = "Task" [ 894.718022] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.735367] env[62974]: DEBUG nova.compute.manager [req-6fc50580-b8d1-4f75-87ee-d69fd734ae59 req-41cab14c-cd63-433d-a5cb-f1d3a6b56c3c service nova] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Received event network-vif-deleted-8ad12b0d-168c-4485-b856-6649ee5fe3a4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 894.748817] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.749602] env[62974]: DEBUG oslo_vmware.api [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654618, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.806701] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.807077] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.807326] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.848023] env[62974]: DEBUG oslo_vmware.api [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654612, 'name': PowerOnVM_Task, 'duration_secs': 0.732953} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.848023] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.848023] env[62974]: DEBUG nova.compute.manager [None req-f62b2114-8c73-470f-96d2-192129cd7e2e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.848489] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d16149-5827-4200-9a61-8b2fdadcd557 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.134413] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Releasing lock "refresh_cache-eb8647c7-f5e1-4de5-8321-9a9ecff5961c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.134873] env[62974]: DEBUG nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Instance network_info: |[{"id": "b3dc1eb5-896e-4ba2-a50d-0626b0e5f490", "address": "fa:16:3e:ee:7c:ac", "network": {"id": "3518cb90-bb1f-4059-9f82-0f81c27ff829", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-912534611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de9a9e0393b4445a0ce8b5cd0df272f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3dc1eb5-89", "ovs_interfaceid": "b3dc1eb5-896e-4ba2-a50d-0626b0e5f490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 895.135637] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:7c:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3dc1eb5-896e-4ba2-a50d-0626b0e5f490', 
'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.143871] env[62974]: DEBUG oslo.service.loopingcall [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.144688] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.148725] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b84d5eb-f8ed-4340-9365-d4479ae8c20d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.169466] env[62974]: INFO nova.compute.manager [-] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Took 2.14 seconds to deallocate network for instance. [ 895.174041] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84572bda-6421-4a66-9db8-062219d15b3e tempest-SecurityGroupsTestJSON-995131817 tempest-SecurityGroupsTestJSON-995131817-project-member] Lock "6928b412-e8cb-42fb-bc47-dc8498f12ad1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.453s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.181861] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654615, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.183562] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.183562] env[62974]: value = "task-2654619" [ 895.183562] env[62974]: _type = "Task" [ 895.183562] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.192264] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.192485] env[62974]: DEBUG oslo_vmware.api [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654617, 'name': PowerOnVM_Task, 'duration_secs': 0.594727} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.193536] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-988e54c7-1e4c-480a-a127-22c1e92fd91d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.195750] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 895.195998] env[62974]: INFO nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Took 8.03 seconds to spawn the instance on the hypervisor. [ 895.196206] env[62974]: DEBUG nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 895.197721] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c712b64-1af4-45af-8a7a-202eaaa293c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.205227] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654619, 'name': CreateVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.214546] env[62974]: DEBUG oslo_vmware.api [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 895.214546] env[62974]: value = "task-2654620" [ 895.214546] env[62974]: _type = "Task" [ 895.214546] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.234025] env[62974]: DEBUG oslo_vmware.api [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.234297] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654610, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.241785] env[62974]: DEBUG oslo_vmware.api [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654618, 'name': PowerOffVM_Task, 'duration_secs': 0.290545} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.242218] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.242310] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.244617] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40daf6c5-1e18-406f-bf6e-edefdb779133 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.263413] env[62974]: DEBUG nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Received event network-vif-plugged-b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 895.263680] env[62974]: DEBUG oslo_concurrency.lockutils [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] Acquiring lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.263901] env[62974]: DEBUG oslo_concurrency.lockutils [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.264530] env[62974]: DEBUG oslo_concurrency.lockutils [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.264530] env[62974]: DEBUG nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] No waiting events found dispatching network-vif-plugged-b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 895.264530] env[62974]: WARNING nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Received unexpected event network-vif-plugged-b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 for instance with vm_state building and task_state spawning. 
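The repeated "Acquiring lock ... by ..." / "acquired ... :: waited" / ""released" ... :: held" DEBUG entries in this trace are produced by oslo.concurrency's lockutils wrapper around the named function. A minimal sketch of that pattern, using a hypothetical lock name and function rather than anything taken from the Nova source, looks like this:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('instance-uuid-events')
    def _pop_event():
        # The body runs only while the named lock is held; lockutils logs
        # the acquisition (including how long the caller waited) before
        # entering and the release (including hold time) after returning,
        # which is the lock lifecycle visible in the entries above.
        pass

    _pop_event()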
[ 895.265091] env[62974]: DEBUG nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Received event network-changed-b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 895.265091] env[62974]: DEBUG nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Refreshing instance network info cache due to event network-changed-b3dc1eb5-896e-4ba2-a50d-0626b0e5f490. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 895.265161] env[62974]: DEBUG oslo_concurrency.lockutils [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] Acquiring lock "refresh_cache-eb8647c7-f5e1-4de5-8321-9a9ecff5961c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.266306] env[62974]: DEBUG oslo_concurrency.lockutils [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] Acquired lock "refresh_cache-eb8647c7-f5e1-4de5-8321-9a9ecff5961c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.266306] env[62974]: DEBUG nova.network.neutron [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Refreshing network info cache for port b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.267688] env[62974]: DEBUG nova.network.neutron [-] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.307629] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654611, 'name': PowerOffVM_Task, 'duration_secs': 1.238147} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.307629] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.307629] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e53e5e8-38fa-4db1-aca5-f0e377ff714c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.318794] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.318794] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.318794] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleting the datastore file [datastore1] b3827c67-9075-4a53-9f9e-8651e3f4b211 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.318794] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9311ea80-bd53-47e5-9b7e-77970fdfdb84 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.323018] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.323018] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 895.347901] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9dabf6-f387-4bb9-8f65-ce0bccac69e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.356017] env[62974]: DEBUG oslo_vmware.api [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for the task: (returnval){ [ 895.356017] env[62974]: value = "task-2654622" [ 895.356017] env[62974]: _type = "Task" [ 895.356017] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.373121] env[62974]: DEBUG oslo_vmware.api [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.400710] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.401054] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecbd2638-05eb-45ce-99c5-a7041778807c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.419982] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 895.419982] env[62974]: value = "task-2654623" [ 895.419982] env[62974]: _type = "Task" [ 895.419982] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.431490] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 895.431490] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.431714] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.431751] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.431918] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.432192] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ad6949d-458a-420c-b3fe-92737455d3b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.442220] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.442848] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.443283] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f62fa8b4-641d-40c5-912c-c0a2cb0e0012 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.449857] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 895.449857] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527ba778-1987-448a-bb20-8becd32491f0" [ 895.449857] env[62974]: _type = "Task" [ 895.449857] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.462535] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527ba778-1987-448a-bb20-8becd32491f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.659978] env[62974]: DEBUG oslo_vmware.api [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654615, 'name': PowerOnVM_Task, 'duration_secs': 1.355571} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.660376] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 895.660675] env[62974]: INFO nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Took 11.07 seconds to spawn the instance on the hypervisor. 
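The repeated "Waiting for the task ... progress is N% ... completed successfully" entries around these timestamps come from the driver's task polling: it submits a vCenter task (PowerOffVM_Task, DeleteDatastoreFile_Task, SearchDatastore_Task, ...) and then polls its state until it finishes. The snippet below is a minimal, self-contained sketch of that poll-until-done pattern, not the oslo.vmware implementation; fetch_task_state, poll_interval and timeout are illustrative assumptions.

    import time

    # Minimal sketch of the polling pattern reflected in the
    # "progress is N%" / "completed successfully" log entries above.
    # fetch_task_state is an illustrative placeholder for whatever call
    # returns the remote task's current state and progress.
    def wait_for_task(fetch_task_state, poll_interval=0.5, timeout=300.0):
        """Poll a long-running remote task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = fetch_task_state()          # e.g. {'status': 'running', 'progress': 25}
            if state['status'] == 'success':
                return state                    # analogous to "completed successfully"
            if state['status'] == 'error':
                raise RuntimeError(state.get('error', 'task failed'))
            # analogous to the "... progress is N%" debug lines
            print(f"task progress is {state.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")

Each vCenter operation in the log produces one such cycle, which is why a single datastore file delete shows up as a "Waiting for the task" entry, one or more progress polls, and a final "completed successfully" entry.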
[ 895.660966] env[62974]: DEBUG nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 895.662287] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f86e94-ccf9-4341-a837-be214dfa0b1c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.695736] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654619, 'name': CreateVM_Task, 'duration_secs': 0.467414} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.695900] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 895.696606] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.696762] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.697082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 895.697322] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-376683ff-2dda-4b71-862b-8f19509518a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.703169] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 895.703169] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52416df0-b17d-2f8c-d32b-461a9cad2275" [ 895.703169] env[62974]: _type = "Task" [ 895.703169] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.710966] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52416df0-b17d-2f8c-d32b-461a9cad2275, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.728064] env[62974]: INFO nova.compute.manager [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Took 25.69 seconds to build instance. [ 895.737367] env[62974]: DEBUG oslo_vmware.api [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654620, 'name': PowerOffVM_Task, 'duration_secs': 0.246805} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.740606] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.740831] env[62974]: DEBUG nova.compute.manager [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 895.741131] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654610, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.527455} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.742264] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568e3a70-8f58-407a-8ae8-1fd5de3faef8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.744524] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c08ed924-9b7d-4773-8e49-c57ecfb27d03/c08ed924-9b7d-4773-8e49-c57ecfb27d03.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.744738] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.748279] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84d4ee15-bddf-4dfc-a4bd-cfbfd00f048b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.758219] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 895.758219] env[62974]: value = "task-2654624" [ 895.758219] env[62974]: _type = "Task" [ 895.758219] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.766108] env[62974]: INFO nova.compute.manager [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Took 0.60 seconds to detach 1 volumes for instance. [ 895.768987] env[62974]: DEBUG nova.compute.manager [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Deleting volume: 580d4492-2e68-4792-86d6-404ee3e08942 {{(pid=62974) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 895.775661] env[62974]: INFO nova.compute.manager [-] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Took 1.90 seconds to deallocate network for instance. [ 895.780594] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.868508] env[62974]: DEBUG oslo_vmware.api [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Task: {'id': task-2654622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26194} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.870202] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.870202] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.870358] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.870551] env[62974]: INFO nova.compute.manager [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Took 1.18 seconds to destroy the instance on the hypervisor. [ 895.870784] env[62974]: DEBUG oslo.service.loopingcall [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.871218] env[62974]: DEBUG nova.compute.manager [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 895.871313] env[62974]: DEBUG nova.network.neutron [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.961349] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527ba778-1987-448a-bb20-8becd32491f0, 'name': SearchDatastore_Task, 'duration_secs': 0.012305} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.967316] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd80a8e1-805b-4d0e-8f35-0174fe120e00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.975116] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 895.975116] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5209af1d-f217-f28f-2d77-bb41310e348b" [ 895.975116] env[62974]: _type = "Task" [ 895.975116] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.986868] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5209af1d-f217-f28f-2d77-bb41310e348b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.095262] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf41c99-13bd-40d4-8c92-0a4cd6228f63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.105055] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b518d2-abe4-433b-b139-03f5b7ec361a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.155970] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62061736-ef57-4ec4-8eee-440bd176c266 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.163814] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076e2889-2cb2-48f5-86d8-ece14e732c88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.183412] env[62974]: DEBUG nova.compute.provider_tree [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.189492] env[62974]: INFO nova.compute.manager [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Took 31.44 seconds to build instance. [ 896.216490] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52416df0-b17d-2f8c-d32b-461a9cad2275, 'name': SearchDatastore_Task, 'duration_secs': 0.034292} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.217063] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.217433] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.217788] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.218948] env[62974]: DEBUG nova.network.neutron [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Updated VIF entry in instance network info cache for port b3dc1eb5-896e-4ba2-a50d-0626b0e5f490. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.219461] env[62974]: DEBUG nova.network.neutron [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Updating instance_info_cache with network_info: [{"id": "b3dc1eb5-896e-4ba2-a50d-0626b0e5f490", "address": "fa:16:3e:ee:7c:ac", "network": {"id": "3518cb90-bb1f-4059-9f82-0f81c27ff829", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-912534611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de9a9e0393b4445a0ce8b5cd0df272f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3dc1eb5-89", "ovs_interfaceid": "b3dc1eb5-896e-4ba2-a50d-0626b0e5f490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.232863] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9463bca2-892d-4f39-9a14-7c67571bcc84 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.221s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.264885] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ccca2628-e40b-4de0-b0dd-fbc78d091d2e tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.105s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.271784] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123515} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.272114] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 896.272891] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c076a2-d775-4602-8a6a-0a02e4798ddb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.300875] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] c08ed924-9b7d-4773-8e49-c57ecfb27d03/c08ed924-9b7d-4773-8e49-c57ecfb27d03.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.300875] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.300875] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2afce98-8671-45c7-87db-f9f06d3ddcd7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.322259] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 896.322259] env[62974]: value = "task-2654627" [ 896.322259] env[62974]: _type = "Task" [ 896.322259] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.332674] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.333303] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654627, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.486273] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5209af1d-f217-f28f-2d77-bb41310e348b, 'name': SearchDatastore_Task, 'duration_secs': 0.012666} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.486515] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.486744] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. 
{{(pid=62974) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 896.487053] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.487246] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.487464] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4064e849-6ab9-48ec-92da-ecd910fd4eca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.489691] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9947a2a7-bc45-4359-93a7-29d0849d020c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.496742] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 896.496742] env[62974]: value = "task-2654628" [ 896.496742] env[62974]: _type = "Task" [ 896.496742] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.500978] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.501184] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.502310] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6337af64-ef7f-4c0f-94bf-4c51743f778d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.508408] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654628, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.511626] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 896.511626] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea4287-d384-4863-5307-011e29421711" [ 896.511626] env[62974]: _type = "Task" [ 896.511626] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.519808] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea4287-d384-4863-5307-011e29421711, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.690021] env[62974]: DEBUG nova.scheduler.client.report [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.692219] env[62974]: DEBUG oslo_concurrency.lockutils [None req-661e86d2-f90b-488d-9852-a64bb8520bd2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "79448002-daa3-4afd-bd1b-36d734642a9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.957s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.722177] env[62974]: DEBUG oslo_concurrency.lockutils [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] Releasing lock "refresh_cache-eb8647c7-f5e1-4de5-8321-9a9ecff5961c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.722757] env[62974]: DEBUG nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Received event network-vif-deleted-0cccd0c9-281d-4a27-a9ec-0957d9f9f6f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 896.723103] env[62974]: DEBUG nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Received event network-vif-deleted-eaf8ac28-c7f5-4462-9003-c34a22eb0f00 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 896.723384] env[62974]: INFO nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: 
12c769fb-8c9e-4089-9563-232cfad89b21] Neutron deleted interface eaf8ac28-c7f5-4462-9003-c34a22eb0f00; detaching it from the instance and deleting it from the info cache [ 896.723714] env[62974]: DEBUG nova.network.neutron [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.819468] env[62974]: DEBUG nova.network.neutron [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.833972] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654627, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.010526] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654628, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.024783] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea4287-d384-4863-5307-011e29421711, 'name': SearchDatastore_Task, 'duration_secs': 0.011223} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.026037] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7150ad26-b697-4119-9462-a01d675fd399 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.035210] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 897.035210] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5238fe32-b3f7-1c07-1559-d3ead393d56d" [ 897.035210] env[62974]: _type = "Task" [ 897.035210] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.047544] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5238fe32-b3f7-1c07-1559-d3ead393d56d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.197021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.197021] env[62974]: DEBUG nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 897.199246] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.362s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.199767] env[62974]: DEBUG nova.objects.instance [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'resources' on Instance uuid 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.229556] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-123d14ab-6786-4815-b172-82e30f808951 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.244070] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4da622-e049-4234-8319-f9f81e0ed9f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.298315] env[62974]: DEBUG nova.compute.manager [req-af421c43-f9d1-424a-a9af-67c0dab3aa2e req-1e8b3b85-459b-4903-a1ee-f7281ee0fef2 service nova] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Detach interface failed, port_id=eaf8ac28-c7f5-4462-9003-c34a22eb0f00, reason: Instance 12c769fb-8c9e-4089-9563-232cfad89b21 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 897.322853] env[62974]: INFO nova.compute.manager [-] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Took 1.45 seconds to deallocate network for instance. [ 897.335917] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654627, 'name': ReconfigVM_Task, 'duration_secs': 0.592213} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.337030] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Reconfigured VM instance instance-00000050 to attach disk [datastore1] c08ed924-9b7d-4773-8e49-c57ecfb27d03/c08ed924-9b7d-4773-8e49-c57ecfb27d03.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.337030] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac55aafa-65d5-4281-b8e4-88d99874fdf5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.343170] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 897.343170] env[62974]: value = "task-2654629" [ 897.343170] env[62974]: _type = "Task" [ 897.343170] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.351546] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654629, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.507429] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712847} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.507694] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. 
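The lock churn around "[datastore1] devstack-image-cache_base/807f8582-.../807f8582-....vmdk" in these entries reflects the image-cache flow: each request serializes on a per-image lock, checks whether the cached VMDK already exists (SearchDatastore_Task), and only afterwards copies it into the instance directory (CopyVirtualDisk_Task) and, for a normal spawn, extends it to the flavor's root disk size (ExtendVirtualDisk_Task). Below is a hedged sketch of that check-then-copy-under-lock flow; it uses oslo.concurrency's lockutils.lock, which the log itself shows, but cache_has, fetch_to_cache, copy_disk and extend_disk are hypothetical placeholders, not Nova APIs, and the datastore path layout is only assumed from the log.

    from oslo_concurrency import lockutils

    # Sketch of the "fetch image if missing, then copy to the instance" flow
    # suggested by the log entries above. cache_has, fetch_to_cache, copy_disk
    # and extend_disk are illustrative placeholders, not real Nova/oslo calls.
    def build_root_disk(image_id, instance_path, root_size_gb,
                        cache_has, fetch_to_cache, copy_disk, extend_disk):
        cached_vmdk = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        # Serialize on a per-image lock, mirroring the Acquiring/Acquired/Releasing
        # lock entries for the cached VMDK path in the log.
        with lockutils.lock(cached_vmdk):
            if not cache_has(cached_vmdk):          # SearchDatastore_Task equivalent
                fetch_to_cache(image_id, cached_vmdk)
        instance_vmdk = f"{instance_path}/{image_id}.vmdk"
        copy_disk(cached_vmdk, instance_vmdk)       # CopyVirtualDisk_Task equivalent
        extend_disk(instance_vmdk, root_size_gb)    # ExtendVirtualDisk_Task equivalent
        return instance_vmdk

The rescue path seen above for instance e23dbff7-d23e-4909-9b33-67ed15c325e7 follows the same copy step but targets a "<image>-rescue.vmdk" file and attaches it with a ReconfigVM_Task instead of extending it.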
[ 897.508505] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5b2e28-dab9-4cc7-964d-8a6c95ce5ee7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.536609] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.536966] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1edfa33d-89af-4dd3-a538-1cd357bfe0db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.559865] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5238fe32-b3f7-1c07-1559-d3ead393d56d, 'name': SearchDatastore_Task, 'duration_secs': 0.054564} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.561314] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.561595] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] eb8647c7-f5e1-4de5-8321-9a9ecff5961c/eb8647c7-f5e1-4de5-8321-9a9ecff5961c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.561976] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 897.561976] env[62974]: value = "task-2654630" [ 897.561976] env[62974]: _type = "Task" [ 897.561976] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.562432] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70d8c2a5-5783-4af2-a8d1-17ea5ad356ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.572280] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654630, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.573494] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 897.573494] env[62974]: value = "task-2654631" [ 897.573494] env[62974]: _type = "Task" [ 897.573494] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.580566] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.703423] env[62974]: DEBUG nova.compute.utils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.704866] env[62974]: DEBUG nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 897.705788] env[62974]: DEBUG nova.network.neutron [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 897.709369] env[62974]: DEBUG nova.objects.instance [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'numa_topology' on Instance uuid 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.766382] env[62974]: DEBUG nova.policy [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a642fe375c743b7958ddeb1490a8032', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e0a57dfe83843708e333b70e0cc2bc4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 897.833197] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.858463] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654629, 'name': Rename_Task, 'duration_secs': 0.294777} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.858463] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.858826] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4c1e819-0f6d-46d2-9c07-02a9e7be63cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.867365] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 897.867365] env[62974]: value = "task-2654632" [ 897.867365] env[62974]: _type = "Task" [ 897.867365] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.879229] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.973749] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a94f1b1-da5a-4ba1-ba93-a40cb310c1e2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "interface-79448002-daa3-4afd-bd1b-36d734642a9e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.974175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a94f1b1-da5a-4ba1-ba93-a40cb310c1e2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "interface-79448002-daa3-4afd-bd1b-36d734642a9e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.974683] env[62974]: DEBUG nova.objects.instance [None req-9a94f1b1-da5a-4ba1-ba93-a40cb310c1e2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lazy-loading 'flavor' on Instance uuid 79448002-daa3-4afd-bd1b-36d734642a9e {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.075605] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.086808] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654631, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.212343] env[62974]: DEBUG nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 898.218314] env[62974]: DEBUG nova.compute.manager [req-92fc006d-3be2-4ae9-9250-f5fa52e005ab req-5c21dbf4-748f-4f28-95cb-8e1779abf823 service nova] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Received event network-vif-deleted-f6b6e9bd-89a4-4847-abfd-7b36b88e5fcf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 898.218534] env[62974]: DEBUG nova.objects.base [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Object Instance<6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7> lazy-loaded attributes: resources,numa_topology {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 898.223770] env[62974]: DEBUG nova.network.neutron [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Successfully created port: 2a679a79-ea4e-44c9-8a79-e5088ad88d84 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.388693] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654632, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.414020] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.414020] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.414020] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 898.484193] env[62974]: DEBUG nova.objects.instance [None req-9a94f1b1-da5a-4ba1-ba93-a40cb310c1e2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lazy-loading 'pci_requests' on Instance uuid 79448002-daa3-4afd-bd1b-36d734642a9e {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.577358] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654630, 'name': ReconfigVM_Task, 'duration_secs': 0.792731} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.583375] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfigured VM instance instance-00000046 to attach disk [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.584815] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ebe990-c802-4ece-8c2b-634774e28ffa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.594367] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644555} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.617530] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] eb8647c7-f5e1-4de5-8321-9a9ecff5961c/eb8647c7-f5e1-4de5-8321-9a9ecff5961c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 898.617729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 898.623800] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eae621f4-9003-437f-bdd1-d5161f34a44b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.626218] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e1ac272-7a42-4e43-863b-65e0d9810b80 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.646313] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 898.646313] env[62974]: value = "task-2654634" [ 898.646313] env[62974]: _type = "Task" [ 898.646313] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.647702] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 898.647702] env[62974]: value = "task-2654633" [ 898.647702] env[62974]: _type = "Task" [ 898.647702] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.659989] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e69130-722a-47b8-b0e8-9bff8d31042c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.668945] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654633, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.673747] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654634, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.674963] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cfe554-f019-4328-a6dd-e89c783db0d4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.707725] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37b6bf6-3d41-46d4-a37c-24bd9e090a48 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.716288] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0b6b2b-38bb-42c2-b433-e3b4f0ea404f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.734748] env[62974]: DEBUG nova.compute.provider_tree [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.762931] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 898.763232] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 898.764192] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a938290e-bb63-4e7f-9944-a0a70c087e21 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.783460] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5b66a3-59d9-44ef-9c72-b9cb1dfe1ce0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.809765] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] volume-13787642-ed9f-449c-b672-b1b3b50942b0/volume-13787642-ed9f-449c-b672-b1b3b50942b0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.810095] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a28ae42-7e5e-46df-aea4-34005f4a1cbd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.829861] env[62974]: DEBUG oslo_vmware.api [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 898.829861] env[62974]: value = "task-2654635" [ 898.829861] env[62974]: _type = "Task" [ 898.829861] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.841998] env[62974]: DEBUG oslo_vmware.api [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654635, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.878797] env[62974]: DEBUG oslo_vmware.api [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654632, 'name': PowerOnVM_Task, 'duration_secs': 0.808559} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.879103] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.879320] env[62974]: INFO nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Took 9.07 seconds to spawn the instance on the hypervisor. [ 898.879495] env[62974]: DEBUG nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 898.880326] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833a0e8f-d9dd-42ff-b065-e58e3b24112c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.938042] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.986638] env[62974]: DEBUG nova.objects.base [None req-9a94f1b1-da5a-4ba1-ba93-a40cb310c1e2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Object Instance<79448002-daa3-4afd-bd1b-36d734642a9e> lazy-loaded attributes: flavor,pci_requests {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 898.986950] env[62974]: DEBUG nova.network.neutron [None req-9a94f1b1-da5a-4ba1-ba93-a40cb310c1e2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.038351] env[62974]: DEBUG nova.objects.instance [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.061697] env[62974]: DEBUG nova.compute.manager [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 899.095908] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9a94f1b1-da5a-4ba1-ba93-a40cb310c1e2 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "interface-79448002-daa3-4afd-bd1b-36d734642a9e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.122s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.159692] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654634, 'name': ReconfigVM_Task, 'duration_secs': 0.389369} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.163271] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.163559] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.33461} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.163768] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e1c67f3-1aef-457a-8362-2ab61dfa6c1f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.165650] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.166203] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1342a39c-ed5b-4a6b-ac1c-547787deb7d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.192019] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] eb8647c7-f5e1-4de5-8321-9a9ecff5961c/eb8647c7-f5e1-4de5-8321-9a9ecff5961c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.192019] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d8f9203-c962-468f-b672-73587882e3d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.205850] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 899.205850] env[62974]: value = "task-2654636" [ 899.205850] env[62974]: _type = "Task" [ 899.205850] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.212112] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 899.212112] env[62974]: value = "task-2654637" [ 899.212112] env[62974]: _type = "Task" [ 899.212112] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.215854] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654636, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.226483] env[62974]: DEBUG nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 899.228406] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654637, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.238856] env[62974]: DEBUG nova.scheduler.client.report [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 899.255657] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 899.255988] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.256339] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 899.256509] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.256764] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 899.256897] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 899.257205] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 899.257400] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 899.257643] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 899.257898] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 899.258120] env[62974]: DEBUG nova.virt.hardware [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} 
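The nova.virt.hardware entries above trace the CPU-topology selection for this boot: the m1.nano flavor and the cirros image set no limits or preferences (0:0:0), the limits therefore default to 65536 sockets/cores/threads, and for a single vCPU the only buildable topology is 1 socket x 1 core x 1 thread, which is what gets sorted to the front. The snippet below is a minimal illustrative sketch of that enumeration step only, not Nova's _get_possible_cpu_topologies implementation; the function name and defaults are assumptions made for the example.

# Illustrative sketch of the enumeration the log walks through: list every
# sockets*cores*threads factorization of the vCPU count that stays within
# the (defaulted) limits. For 1 vCPU this yields only 1:1:1.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]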
[ 899.259154] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf788d5-a0f5-4875-8f2b-365a737d1591 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.269354] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0727d80-2465-4fec-8226-b6ed716e669c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.340704] env[62974]: DEBUG oslo_vmware.api [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654635, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.398880] env[62974]: INFO nova.compute.manager [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Took 28.81 seconds to build instance. [ 899.544470] env[62974]: DEBUG oslo_concurrency.lockutils [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.544753] env[62974]: DEBUG oslo_concurrency.lockutils [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.544753] env[62974]: DEBUG nova.network.neutron [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.544930] env[62974]: DEBUG nova.objects.instance [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'info_cache' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.561338] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.592416] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.716582] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 
tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654636, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.727099] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654637, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.745217] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.546s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.747837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.502s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.748086] env[62974]: DEBUG nova.objects.instance [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lazy-loading 'resources' on Instance uuid c38cddae-95b3-4f4a-bf3a-5f0bdde548a9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.842786] env[62974]: DEBUG oslo_vmware.api [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654635, 'name': ReconfigVM_Task, 'duration_secs': 0.687657} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.843099] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfigured VM instance instance-0000004a to attach disk [datastore1] volume-13787642-ed9f-449c-b672-b1b3b50942b0/volume-13787642-ed9f-449c-b672-b1b3b50942b0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.848294] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f510db73-9309-44fd-a082-6f5c5b16b867 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.866173] env[62974]: DEBUG oslo_vmware.api [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 899.866173] env[62974]: value = "task-2654638" [ 899.866173] env[62974]: _type = "Task" [ 899.866173] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.876919] env[62974]: DEBUG oslo_vmware.api [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654638, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.900931] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61d754dd-aac6-4c2e-ac26-55d4dd2b7b52 tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.315s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.013186] env[62974]: DEBUG nova.network.neutron [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Successfully updated port: 2a679a79-ea4e-44c9-8a79-e5088ad88d84 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.048672] env[62974]: DEBUG nova.objects.base [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 900.064075] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-b3827c67-9075-4a53-9f9e-8651e3f4b211" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.064305] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 900.064500] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.064656] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.064797] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.064944] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.065101] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None 
None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.065251] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.065390] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 900.065535] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.220199] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654636, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.234026] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654637, 'name': ReconfigVM_Task, 'duration_secs': 0.578562} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.234026] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Reconfigured VM instance instance-00000051 to attach disk [datastore1] eb8647c7-f5e1-4de5-8321-9a9ecff5961c/eb8647c7-f5e1-4de5-8321-9a9ecff5961c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.234026] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-851ebfa7-d10b-41da-a09d-f8a5b87ed917 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.238577] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 900.238577] env[62974]: value = "task-2654639" [ 900.238577] env[62974]: _type = "Task" [ 900.238577] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.249229] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654639, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.263018] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ffd212f2-0515-4b68-9045-82f5f9c32329 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 36.286s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.263018] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 16.230s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.263018] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.263018] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.263360] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.265410] env[62974]: INFO nova.compute.manager [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Terminating instance [ 900.378102] env[62974]: DEBUG oslo_vmware.api [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654638, 'name': ReconfigVM_Task, 'duration_secs': 0.185644} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.381057] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 900.424963] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096b6a77-e3ce-44f2-a826-c3009e162a5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.438556] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Suspending the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 900.438993] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-0b6cd5c8-3f11-4590-87c0-c2f86a003668 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.450395] env[62974]: DEBUG oslo_vmware.api [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 900.450395] env[62974]: value = "task-2654640" [ 900.450395] env[62974]: _type = "Task" [ 900.450395] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.458643] env[62974]: DEBUG oslo_vmware.api [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654640, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.517110] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "refresh_cache-226f3328-e3b1-4ae1-8b7c-349b552cf5a2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.517287] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "refresh_cache-226f3328-e3b1-4ae1-8b7c-349b552cf5a2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.517438] env[62974]: DEBUG nova.network.neutron [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 900.568600] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.691953] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cc80c5-279f-4873-80a7-4466b4c1bf11 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.699364] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d391cee-53cc-4286-a1c4-85cf8e33fd2a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.738572] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c8c056-1c89-45e3-8cea-3e7d962d7ac8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.746873] env[62974]: DEBUG nova.compute.manager [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Received event network-vif-plugged-2a679a79-ea4e-44c9-8a79-e5088ad88d84 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 900.747123] env[62974]: DEBUG oslo_concurrency.lockutils [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] Acquiring lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.747328] env[62974]: DEBUG oslo_concurrency.lockutils [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] Lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.747491] env[62974]: DEBUG oslo_concurrency.lockutils [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] Lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.747678] env[62974]: DEBUG nova.compute.manager [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] No waiting events found dispatching network-vif-plugged-2a679a79-ea4e-44c9-8a79-e5088ad88d84 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 900.747846] env[62974]: WARNING nova.compute.manager [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Received unexpected event network-vif-plugged-2a679a79-ea4e-44c9-8a79-e5088ad88d84 for instance with vm_state building and task_state spawning. [ 900.748012] env[62974]: DEBUG nova.compute.manager [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Received event network-changed-2a679a79-ea4e-44c9-8a79-e5088ad88d84 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 900.748177] env[62974]: DEBUG nova.compute.manager [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Refreshing instance network info cache due to event network-changed-2a679a79-ea4e-44c9-8a79-e5088ad88d84. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 900.748344] env[62974]: DEBUG oslo_concurrency.lockutils [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] Acquiring lock "refresh_cache-226f3328-e3b1-4ae1-8b7c-349b552cf5a2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.754190] env[62974]: DEBUG oslo_vmware.api [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654636, 'name': PowerOnVM_Task, 'duration_secs': 1.302876} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.755757] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0579347-0b0f-49d4-9c81-728c8293cb3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.759847] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.765405] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654639, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.766413] env[62974]: DEBUG nova.compute.manager [None req-36d7437a-a984-4324-838d-3ffccdcb8fc0 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.767557] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9a78d8-6c48-4c9e-b7aa-13e2bcb31354 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.779761] env[62974]: DEBUG nova.compute.manager [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 900.780167] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.780723] env[62974]: DEBUG nova.compute.provider_tree [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.784839] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d56c0b4b-aabe-43e7-88e8-5d88808c5184 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.799346] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a45650-20ff-43b9-a3d0-fa3ef94a2fa8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.835709] env[62974]: WARNING nova.virt.vmwareapi.vmops [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7 could not be found. [ 900.835899] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.836069] env[62974]: INFO nova.compute.manager [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Took 0.06 seconds to destroy the instance on the hypervisor. [ 900.836331] env[62974]: DEBUG oslo.service.loopingcall [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.838148] env[62974]: DEBUG nova.network.neutron [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating instance_info_cache with network_info: [{"id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "address": "fa:16:3e:9f:c5:d4", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ef50dc0-ed", "ovs_interfaceid": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.839288] env[62974]: DEBUG nova.compute.manager [-] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 900.839394] env[62974]: DEBUG nova.network.neutron [-] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.961479] env[62974]: DEBUG oslo_vmware.api [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654640, 'name': SuspendVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.078292] env[62974]: DEBUG nova.network.neutron [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.253805] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654639, 'name': Rename_Task, 'duration_secs': 1.006767} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.254234] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 901.254406] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71a17d7f-2d32-4120-833f-64922e581993 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.265138] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 901.265138] env[62974]: value = "task-2654641" [ 901.265138] env[62974]: _type = "Task" [ 901.265138] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.279337] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654641, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.285481] env[62974]: DEBUG nova.network.neutron [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Updating instance_info_cache with network_info: [{"id": "2a679a79-ea4e-44c9-8a79-e5088ad88d84", "address": "fa:16:3e:45:cc:3a", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a679a79-ea", "ovs_interfaceid": "2a679a79-ea4e-44c9-8a79-e5088ad88d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.292152] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5284e29a-4e38-2568-f47e-8f41883db2f2/disk-0.vmdk. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 901.292152] env[62974]: DEBUG nova.scheduler.client.report [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.297739] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdf57a2-bf40-4593-88c5-5489dcdabd8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.308446] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5284e29a-4e38-2568-f47e-8f41883db2f2/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 901.308887] env[62974]: ERROR oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5284e29a-4e38-2568-f47e-8f41883db2f2/disk-0.vmdk due to incomplete transfer. [ 901.309090] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-16055c89-e55f-4f73-9e98-0e38a5263954 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.317018] env[62974]: DEBUG oslo_vmware.rw_handles [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5284e29a-4e38-2568-f47e-8f41883db2f2/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 901.317018] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Uploaded image 5d681a90-3310-451a-8a9f-42285b699971 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 901.320253] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 901.322477] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9e7489a2-86fc-4291-96c8-55dad59fce9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.329688] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 901.329688] env[62974]: value = "task-2654642" [ 901.329688] env[62974]: _type = "Task" [ 901.329688] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.340069] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654642, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.345619] env[62974]: DEBUG oslo_concurrency.lockutils [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.426516] env[62974]: DEBUG nova.objects.instance [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'flavor' on Instance uuid 3df97cea-5a6e-4d7a-b2f3-e02213816e24 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.469083] env[62974]: DEBUG oslo_vmware.api [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654640, 'name': SuspendVM_Task} progress is 50%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.613015] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "79448002-daa3-4afd-bd1b-36d734642a9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.613327] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "79448002-daa3-4afd-bd1b-36d734642a9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.613599] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "79448002-daa3-4afd-bd1b-36d734642a9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.613760] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "79448002-daa3-4afd-bd1b-36d734642a9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.613937] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "79448002-daa3-4afd-bd1b-36d734642a9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.616726] env[62974]: INFO nova.compute.manager [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Terminating instance [ 901.634903] env[62974]: DEBUG nova.network.neutron [-] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.775784] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654641, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.791029] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "refresh_cache-226f3328-e3b1-4ae1-8b7c-349b552cf5a2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.791029] env[62974]: DEBUG nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Instance network_info: |[{"id": "2a679a79-ea4e-44c9-8a79-e5088ad88d84", "address": "fa:16:3e:45:cc:3a", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a679a79-ea", "ovs_interfaceid": "2a679a79-ea4e-44c9-8a79-e5088ad88d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 901.791265] env[62974]: DEBUG oslo_concurrency.lockutils [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] Acquired lock "refresh_cache-226f3328-e3b1-4ae1-8b7c-349b552cf5a2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.791265] env[62974]: DEBUG nova.network.neutron [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Refreshing network info cache for port 2a679a79-ea4e-44c9-8a79-e5088ad88d84 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.791265] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:cc:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a679a79-ea4e-44c9-8a79-e5088ad88d84', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.802455] env[62974]: DEBUG oslo.service.loopingcall [None req-84ede54e-05ef-410a-9dc0-56206d850fcd 
tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.804891] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.805977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.808164] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57f9e4c3-5e1b-4975-a974-a52bb98dbd43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.825644] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.596s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.826158] env[62974]: DEBUG nova.objects.instance [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lazy-loading 'resources' on Instance uuid b31dea29-79d6-4117-bdb5-2d38fb660a53 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.842254] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.842254] env[62974]: value = "task-2654643" [ 901.842254] env[62974]: _type = "Task" [ 901.842254] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.620174] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.620498] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.620619] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.620795] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.620957] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.625430] env[62974]: DEBUG nova.compute.manager [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 902.625628] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.626147] env[62974]: INFO nova.compute.manager [-] [instance: 6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7] Took 1.79 seconds to deallocate network for instance. 
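A note on the oslo.concurrency lockutils entries running through this excerpt: they follow a fixed shape, 'Lock "<name>" acquired by "<target>" :: waited <t>s' on acquire and 'Lock "<name>" "released" by "<target>" :: held <t>s' on release. The following is a minimal, stdlib-only sketch (an illustration, not Nova or oslo code) for pulling those wait/hold timings out of an excerpt like this one; the regexes assume only the format visible in these lines.

import re
from collections import defaultdict

# Shapes taken from the lockutils lines in this excerpt, e.g.
#   Lock "compute_resources" acquired by "...ResourceTracker.update_usage" :: waited 16.596s
#   Lock "compute_resources" "released" by "...ResourceTracker.update_usage" :: held 2.058s
ACQUIRED = re.compile(r'Lock "(?P<name>[^"]+)" acquired by "[^"]+" :: waited (?P<t>[\d.]+)s')
RELEASED = re.compile(r'Lock "(?P<name>[^"]+)" "released" by "[^"]+" :: held (?P<t>[\d.]+)s')

def lock_timings(lines):
    """Collect wait/hold durations per lock name from raw log lines."""
    stats = defaultdict(lambda: {"waited": [], "held": []})
    for line in lines:
        if (m := ACQUIRED.search(line)):
            stats[m["name"]]["waited"].append(float(m["t"]))
        elif (m := RELEASED.search(line)):
            stats[m["name"]]["held"].append(float(m["t"]))
    return dict(stats)

Fed the "compute_resources" lines above, it would report the 16.596s wait and 2.058s hold recorded around the resource tracker updates.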
[ 902.630674] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654642, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.630852] env[62974]: WARNING oslo_vmware.common.loopingcall [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] task run outlasted interval by 0.29694299999999996 sec [ 902.631605] env[62974]: INFO nova.compute.manager [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Terminating instance [ 902.633130] env[62974]: DEBUG oslo_concurrency.lockutils [None req-855c1f2b-413c-4dcb-bafd-efe658c97ec0 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.036s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.634597] env[62974]: INFO nova.scheduler.client.report [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Deleted allocations for instance c38cddae-95b3-4f4a-bf3a-5f0bdde548a9 [ 902.643246] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9344fde4-a266-4732-bad9-b22177010efd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.663962] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654643, 'name': CreateVM_Task, 'duration_secs': 0.431208} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.668541] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.669726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.669898] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.670244] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.678472] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cd46ea0-ee00-4315-b09e-d84d24fdb338 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.681040] env[62974]: DEBUG oslo_vmware.api [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654641, 'name': PowerOnVM_Task, 'duration_secs': 0.951013} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.682992] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.682992] env[62974]: DEBUG oslo_vmware.api [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654640, 'name': SuspendVM_Task, 'duration_secs': 1.60079} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.685132] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.686731] env[62974]: INFO nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Took 10.34 seconds to spawn the instance on the hypervisor. [ 902.686731] env[62974]: DEBUG nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.686731] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-162e3269-6e59-4f06-b017-eeabc7bd1e2f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.687400] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Suspended the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 902.687585] env[62974]: DEBUG nova.compute.manager [None req-cd98cea2-942f-44dc-8113-bea2c1aca242 tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.687858] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654642, 'name': Destroy_Task, 'duration_secs': 0.688261} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.688926] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05de50f-ac91-431d-ba59-600c0b9387c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.692569] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220c7ee4-f46c-47b3-b349-c98f4db84fd5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.695411] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Destroyed the VM [ 902.695411] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 902.699170] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ba3627ee-5e9d-41db-8448-eb1679ae5099 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.701581] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 902.701581] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ed2a09-df81-1e4e-6ebc-88871406f0f5" [ 902.701581] env[62974]: _type = "Task" [ 902.701581] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.707610] env[62974]: DEBUG oslo_vmware.api [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 902.707610] env[62974]: value = "task-2654644" [ 902.707610] env[62974]: _type = "Task" [ 902.707610] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.729436] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 902.729436] env[62974]: value = "task-2654645" [ 902.729436] env[62974]: _type = "Task" [ 902.729436] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.734095] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ed2a09-df81-1e4e-6ebc-88871406f0f5, 'name': SearchDatastore_Task, 'duration_secs': 0.027064} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.738235] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.739059] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.739059] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.739059] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.739059] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.743341] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36e7aa76-2e01-4499-a4cb-6f124da4be2f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.745817] env[62974]: DEBUG oslo_vmware.api [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.753197] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654645, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.759830] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.760092] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.760843] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5b798ea-bcfc-4fdf-9cc5-dba54beefd0b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.768199] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 902.768199] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fc14d8-78b4-f15f-696b-9cd3251e9642" [ 902.768199] env[62974]: _type = "Task" [ 902.768199] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.779620] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fc14d8-78b4-f15f-696b-9cd3251e9642, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.980250] env[62974]: DEBUG nova.network.neutron [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Updated VIF entry in instance network info cache for port 2a679a79-ea4e-44c9-8a79-e5088ad88d84. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.980732] env[62974]: DEBUG nova.network.neutron [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Updating instance_info_cache with network_info: [{"id": "2a679a79-ea4e-44c9-8a79-e5088ad88d84", "address": "fa:16:3e:45:cc:3a", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a679a79-ea", "ovs_interfaceid": "2a679a79-ea4e-44c9-8a79-e5088ad88d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.080617] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b68a5e7-91bf-4fd3-8af4-fa4a6e90527f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.089621] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ab2ec7-b761-49c3-8164-6970e4a071f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.122198] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9ccdce-0642-46dd-a216-a12db3a32333 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.130804] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b2cd56-7c80-4db9-af10-5e83bd336f9b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.135533] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.136165] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1127047c-7bd3-447d-9ba4-1fe96f87ef62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.149497] env[62974]: DEBUG nova.compute.provider_tree [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not 
changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.152369] env[62974]: INFO nova.compute.manager [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Rebuilding instance [ 903.154335] env[62974]: DEBUG oslo_vmware.api [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 903.154335] env[62974]: value = "task-2654646" [ 903.154335] env[62974]: _type = "Task" [ 903.154335] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.164741] env[62974]: DEBUG oslo_vmware.api [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654646, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.169038] env[62974]: DEBUG nova.compute.manager [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 903.169038] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 903.171152] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b72fa1-f6ca-49ba-bb8d-a7d875a34298 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.174306] env[62974]: DEBUG oslo_concurrency.lockutils [None req-019cce18-29fc-4a71-a879-490461142404 tempest-ServersWithSpecificFlavorTestJSON-5389811 tempest-ServersWithSpecificFlavorTestJSON-5389811-project-member] Lock "c38cddae-95b3-4f4a-bf3a-5f0bdde548a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.245s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.180902] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.181478] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4d07ff0-9635-4e13-8c30-96ef86a11291 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.196983] env[62974]: DEBUG oslo_vmware.api [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] 
Waiting for the task: (returnval){ [ 903.196983] env[62974]: value = "task-2654647" [ 903.196983] env[62974]: _type = "Task" [ 903.196983] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.207339] env[62974]: DEBUG oslo_vmware.api [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.212989] env[62974]: DEBUG nova.compute.manager [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.216917] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a53c565-3e55-4ecf-a878-03babb4a438a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.236169] env[62974]: DEBUG oslo_vmware.api [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654644, 'name': PowerOffVM_Task, 'duration_secs': 0.421444} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.238216] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.238478] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.238807] env[62974]: INFO nova.compute.manager [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Took 31.71 seconds to build instance. [ 903.243710] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9609da98-c330-4995-92fb-a6f17832555d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.251824] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654645, 'name': RemoveSnapshot_Task} progress is 58%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.280792] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fc14d8-78b4-f15f-696b-9cd3251e9642, 'name': SearchDatastore_Task, 'duration_secs': 0.018564} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.282286] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d34a1e2-0de9-4409-93a7-e57e6594894f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.290930] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 903.290930] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526f77f1-cf81-cf30-409f-40a8048c7896" [ 903.290930] env[62974]: _type = "Task" [ 903.290930] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.298835] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526f77f1-cf81-cf30-409f-40a8048c7896, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.314705] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.314902] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.315224] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Deleting the datastore file [datastore2] 79448002-daa3-4afd-bd1b-36d734642a9e {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.315470] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41ff8629-fab9-418a-94e7-121757897bcd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.324198] env[62974]: DEBUG oslo_vmware.api [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for the task: (returnval){ [ 903.324198] env[62974]: value = "task-2654649" [ 
903.324198] env[62974]: _type = "Task" [ 903.324198] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.337757] env[62974]: DEBUG oslo_vmware.api [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654649, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.444300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.444434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.488829] env[62974]: DEBUG oslo_concurrency.lockutils [req-c7f44e05-5ef8-476b-ad6e-39ba006b22ab req-09a25949-fd2a-4082-8fc0-920890215208 service nova] Releasing lock "refresh_cache-226f3328-e3b1-4ae1-8b7c-349b552cf5a2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.656194] env[62974]: DEBUG nova.scheduler.client.report [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.673390] env[62974]: DEBUG oslo_vmware.api [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654646, 'name': PowerOnVM_Task, 'duration_secs': 0.436927} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.673591] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.674408] env[62974]: DEBUG nova.compute.manager [None req-224b5ca2-1ab3-4087-baff-c8c1b1d3f551 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.676966] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ceba73-4b77-4beb-9a83-82c19b0ee668 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.699998] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f03eca7a-55f9-4096-a979-908199143475 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "6d038dd1-cfd9-4726-afe4-6cdc5dfbcdf7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.438s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.714239] env[62974]: DEBUG oslo_vmware.api [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654647, 'name': PowerOffVM_Task, 'duration_secs': 0.21282} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.714521] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.714734] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.715415] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f17e245-e735-4d70-8f19-05319f16fb35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.746087] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4a57ee1-3a2d-4664-9181-17d420038873 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.227s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.751776] env[62974]: DEBUG oslo_vmware.api [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654645, 'name': RemoveSnapshot_Task, 'duration_secs': 0.879456} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.752063] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 903.752296] env[62974]: INFO nova.compute.manager [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Took 21.50 seconds to snapshot the instance on the hypervisor. 
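The completed-task poll lines in this excerpt embed the task duration inline, for example 'duration_secs': 0.879456 on the RemoveSnapshot_Task just above. A short, stdlib-only sketch for ranking the slowest completed vCenter tasks in a log like this; it assumes only that literal line format and is illustrative, not part of oslo.vmware.

import re

# Matches the completion lines seen above, e.g.
#   Task: {'id': task-2654645, 'name': RemoveSnapshot_Task, 'duration_secs': 0.879456} completed successfully.
TASK_DONE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>[^,]+), "
    r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully"
)

def slowest_tasks(lines, top=5):
    """Return (duration, task name, task id) for the longest completed tasks."""
    done = [(float(m["secs"]), m["name"], m["id"])
            for line in lines
            if (m := TASK_DONE.search(line))]
    return sorted(done, reverse=True)[:top]

Run against this section, it would rank the 1.60s SuspendVM_Task above the sub-second SearchDatastore_Task polls.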
[ 903.796432] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.796648] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.796828] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Deleting the datastore file [datastore1] c08ed924-9b7d-4773-8e49-c57ecfb27d03 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.797501] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-947a658e-e825-42fe-bf62-863241f93a65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.802736] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526f77f1-cf81-cf30-409f-40a8048c7896, 'name': SearchDatastore_Task, 'duration_secs': 0.010547} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.803780] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.803780] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 226f3328-e3b1-4ae1-8b7c-349b552cf5a2/226f3328-e3b1-4ae1-8b7c-349b552cf5a2.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.803937] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17f0cb9d-1d9f-40cb-928f-7aaae5974f88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.807040] env[62974]: DEBUG oslo_vmware.api [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for the task: (returnval){ [ 903.807040] env[62974]: value = "task-2654651" [ 903.807040] env[62974]: _type = "Task" [ 903.807040] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.811159] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 903.811159] env[62974]: value = "task-2654652" [ 903.811159] env[62974]: _type = "Task" [ 903.811159] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.817645] env[62974]: DEBUG oslo_vmware.api [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.822502] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.831935] env[62974]: DEBUG oslo_vmware.api [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Task: {'id': task-2654649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25716} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.832543] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.832543] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 903.832685] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 903.832775] env[62974]: INFO nova.compute.manager [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Took 1.21 seconds to destroy the instance on the hypervisor. [ 903.833015] env[62974]: DEBUG oslo.service.loopingcall [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.833205] env[62974]: DEBUG nova.compute.manager [-] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 903.833296] env[62974]: DEBUG nova.network.neutron [-] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 903.947822] env[62974]: DEBUG nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 904.063694] env[62974]: INFO nova.compute.manager [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Unrescuing [ 904.063838] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.064613] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquired lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.064613] env[62974]: DEBUG nova.network.neutron [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.163127] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.337s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.166488] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.846s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.166860] env[62974]: DEBUG nova.objects.instance [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lazy-loading 'resources' on Instance uuid c763d45b-44f0-4557-a726-7aad2bc58ba8 {{(pid=62974) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 904.200364] env[62974]: INFO nova.scheduler.client.report [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Deleted allocations for instance b31dea29-79d6-4117-bdb5-2d38fb660a53 [ 904.246698] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.247549] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c1e5dd7-8e7f-49fb-9a96-47cdf96027fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.259337] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 904.259337] env[62974]: value = "task-2654653" [ 904.259337] env[62974]: _type = "Task" [ 904.259337] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.274311] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654653, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.300488] env[62974]: DEBUG nova.compute.manager [None req-619dc65a-bfcd-4cc3-8046-ac25826a2e72 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Found 1 images (rotation: 2) {{(pid=62974) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 904.328208] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654652, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.333553] env[62974]: DEBUG oslo_vmware.api [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Task: {'id': task-2654651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295121} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.333923] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.334208] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.334481] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.335406] env[62974]: INFO nova.compute.manager [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Took 1.17 seconds to destroy the instance on the hypervisor. [ 904.335406] env[62974]: DEBUG oslo.service.loopingcall [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.335406] env[62974]: DEBUG nova.compute.manager [-] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.335641] env[62974]: DEBUG nova.network.neutron [-] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.474158] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.535368] env[62974]: INFO nova.compute.manager [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Resuming [ 904.535574] env[62974]: DEBUG nova.objects.instance [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lazy-loading 'flavor' on Instance uuid 366b5816-a847-48d1-ad03-5758e473a9d0 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.654411] env[62974]: DEBUG nova.network.neutron [-] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.714021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9d16fa16-f0e9-4e9a-af64-178b75282819 tempest-ListServerFiltersTestJSON-869618597 tempest-ListServerFiltersTestJSON-869618597-project-member] Lock "b31dea29-79d6-4117-bdb5-2d38fb660a53" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.114s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.776021] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654653, 'name': PowerOffVM_Task, 'duration_secs': 0.474022} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.776459] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 904.826354] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654652, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.851135] env[62974]: INFO nova.compute.manager [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Detaching volume 13787642-ed9f-449c-b672-b1b3b50942b0 [ 904.891997] env[62974]: INFO nova.virt.block_device [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Attempting to driver detach volume 13787642-ed9f-449c-b672-b1b3b50942b0 from mountpoint /dev/sdb [ 904.892368] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 904.892646] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 904.893753] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9a14a9-2029-41e0-8b07-3094bdf33a83 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.918173] env[62974]: DEBUG nova.network.neutron [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", 
"segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f4c134a-f0", "ovs_interfaceid": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.920034] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97e55fb-fac4-4559-af20-8fa848d253b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.927383] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7709e3dc-3298-4167-8fa9-c545f249b474 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.956302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02c6c07-efe7-43ef-85c4-f1fc59bf23c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.976610] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] The volume has not been displaced from its original location: [datastore1] volume-13787642-ed9f-449c-b672-b1b3b50942b0/volume-13787642-ed9f-449c-b672-b1b3b50942b0.vmdk. No consolidation needed. {{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 904.984813] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfiguring VM instance instance-0000004a to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 904.988590] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48933fca-49f3-4124-9f25-a01bc1cf251c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.008359] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 905.008359] env[62974]: value = "task-2654654" [ 905.008359] env[62974]: _type = "Task" [ 905.008359] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.019322] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654654, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.047860] env[62974]: DEBUG nova.compute.manager [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.048745] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fd0c9c-40f7-40f5-9194-e7d21e5ae64c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.157647] env[62974]: INFO nova.compute.manager [-] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Took 1.32 seconds to deallocate network for instance. [ 905.177724] env[62974]: DEBUG nova.network.neutron [-] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.271704] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1212908-024e-4843-9b7d-1b195e044ef3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.279929] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ca2628-d561-4880-be1a-ffa8f97d2d72 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.321145] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f656bab8-4d45-413e-b667-5c1195271265 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.329259] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654652, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.332423] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edc241f-91ee-4920-b46a-48f8a077b910 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.347652] env[62974]: DEBUG nova.compute.provider_tree [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.356217] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.356307] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.424172] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Releasing lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.424886] env[62974]: DEBUG nova.objects.instance [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lazy-loading 'flavor' on Instance uuid e23dbff7-d23e-4909-9b33-67ed15c325e7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.487359] env[62974]: DEBUG nova.compute.manager [req-ec91ef56-f5ae-422c-8959-7376c9fae622 req-c49db89f-effe-4f98-ab56-6d3bfa581081 service nova] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Received event network-vif-deleted-31c4426e-2cae-45ff-be26-c79cdd0db248 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 905.487653] env[62974]: DEBUG nova.compute.manager [req-ec91ef56-f5ae-422c-8959-7376c9fae622 req-c49db89f-effe-4f98-ab56-6d3bfa581081 service nova] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Received event network-vif-deleted-2236b408-d781-4e4d-96dd-2759f3457b38 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 905.519317] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654654, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.562348] env[62974]: INFO nova.compute.manager [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] instance snapshotting [ 905.562929] env[62974]: DEBUG nova.objects.instance [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'flavor' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.672407] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.683115] env[62974]: INFO nova.compute.manager [-] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Took 1.35 seconds to deallocate network for instance. [ 905.828154] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654652, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.677622} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.828508] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 226f3328-e3b1-4ae1-8b7c-349b552cf5a2/226f3328-e3b1-4ae1-8b7c-349b552cf5a2.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.828746] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.829135] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04934e81-46ce-41c9-89e1-bf98be56bbbb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.835933] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 905.835933] env[62974]: value = "task-2654655" [ 905.835933] env[62974]: _type = "Task" [ 905.835933] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.844356] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.857131] env[62974]: DEBUG nova.scheduler.client.report [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.861071] env[62974]: DEBUG nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.932750] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2560965b-a8f0-4b7d-9b8f-5ec6180a10b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.962176] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.962770] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb8de90f-436d-4edb-b149-bc5edc4bacf8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.968843] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 905.968843] env[62974]: value = "task-2654656" [ 905.968843] env[62974]: _type = "Task" [ 905.968843] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.978571] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654656, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.019128] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654654, 'name': ReconfigVM_Task, 'duration_secs': 0.642186} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.019461] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfigured VM instance instance-0000004a to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 906.026362] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4628b9f3-aca1-4a23-a5b7-49ef0acc0643 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.046023] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 906.046023] env[62974]: value = "task-2654657" [ 906.046023] env[62974]: _type = "Task" [ 906.046023] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.054131] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.054360] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquired lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.054562] env[62974]: DEBUG nova.network.neutron [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.055827] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654657, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.068682] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2601cbe-6a2b-42a6-aee4-8ed5a6b31d3c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.092202] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5228995-7a38-43b2-acc1-7c5491283075 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.189919] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.346741] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120458} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.347540] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 906.348481] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94750953-c3dc-44fc-9d63-c9e9f9024c63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.372460] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 226f3328-e3b1-4ae1-8b7c-349b552cf5a2/226f3328-e3b1-4ae1-8b7c-349b552cf5a2.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 906.373702] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.207s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.378191] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c33ab864-e451-42e2-a912-a8481f0df050 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.395888] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 
tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.687s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.396482] env[62974]: DEBUG nova.objects.instance [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lazy-loading 'resources' on Instance uuid 70adaccf-44ab-44b1-ac8a-005d42c09f0a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.406020] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 906.406020] env[62974]: value = "task-2654658" [ 906.406020] env[62974]: _type = "Task" [ 906.406020] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.414576] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654658, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.415814] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.422582] env[62974]: INFO nova.scheduler.client.report [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Deleted allocations for instance c763d45b-44f0-4557-a726-7aad2bc58ba8 [ 906.479748] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654656, 'name': PowerOffVM_Task, 'duration_secs': 0.242887} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.480226] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.485957] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfiguring VM instance instance-00000046 to detach disk 2002 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 906.486612] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db80c529-3934-4480-bcd2-923573a84af3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.509024] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 906.509024] env[62974]: value = "task-2654659" [ 906.509024] env[62974]: _type = "Task" [ 906.509024] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.515208] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654659, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.558022] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654657, 'name': ReconfigVM_Task, 'duration_secs': 0.152998} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.558022] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 906.606766] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 906.606766] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-28735a9d-4f85-4854-a3d1-351d9048ab17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.616207] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 906.616207] env[62974]: value = "task-2654660" [ 906.616207] env[62974]: _type = "Task" [ 906.616207] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.624869] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654660, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.815654] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.816928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.816928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.816928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.816928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.818746] env[62974]: INFO nova.compute.manager [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Terminating instance [ 906.882499] env[62974]: DEBUG nova.network.neutron [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [{"id": "07b0aa8b-b38d-489b-9998-6efe6126083f", "address": "fa:16:3e:60:15:2c", "network": {"id": "e99637ca-d0a9-4a3a-a1ce-bde518359f13", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1286834988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e631c2e78a4391bceb20072992f8bd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b0aa8b-b3", "ovs_interfaceid": "07b0aa8b-b38d-489b-9998-6efe6126083f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.917770] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654658, 'name': ReconfigVM_Task, 'duration_secs': 0.409744} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.918025] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 226f3328-e3b1-4ae1-8b7c-349b552cf5a2/226f3328-e3b1-4ae1-8b7c-349b552cf5a2.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.918721] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1f13907-a77b-48e9-b119-a8295e17e40f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.925203] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 906.925203] env[62974]: value = "task-2654661" [ 906.925203] env[62974]: _type = "Task" [ 906.925203] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.934641] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654661, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.937484] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f66856a-05e9-4d06-a55d-4b2c6cbc6c3b tempest-ServersTestFqdnHostnames-1870631720 tempest-ServersTestFqdnHostnames-1870631720-project-member] Lock "c763d45b-44f0-4557-a726-7aad2bc58ba8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.770s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.022199] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654659, 'name': ReconfigVM_Task, 'duration_secs': 0.26026} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.025640] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfigured VM instance instance-00000046 to detach disk 2002 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 907.025807] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.026298] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ae6ccca-d9f6-4806-9c1e-96048475eee8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.032509] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 907.032509] env[62974]: value = "task-2654662" [ 907.032509] env[62974]: _type = "Task" [ 907.032509] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.041662] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654662, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.134146] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654660, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.303165] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8664334-1217-4f65-b9f2-e63af58dd8f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.311696] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ebc56c-90af-4dc1-bbff-7ad73e2a6c86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.350675] env[62974]: DEBUG nova.compute.manager [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.350958] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.352124] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4c0052-05cc-4b50-a4aa-4281e46a90f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.355338] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fbacfe-ac64-43b5-9522-573dcccdb745 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.365359] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bf5413-632c-4fd9-8dae-482c7e37b230 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.369401] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.369701] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-441c1fcc-98d6-4d47-8303-d1d05655833c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.381978] env[62974]: DEBUG nova.compute.provider_tree [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.386679] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Releasing lock "refresh_cache-366b5816-a847-48d1-ad03-5758e473a9d0" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.386679] env[62974]: DEBUG oslo_vmware.api [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 907.386679] env[62974]: value = "task-2654663" [ 907.386679] env[62974]: _type = "Task" [ 907.386679] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.386679] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735a98a2-1ccb-483d-ba1c-055fbd0bc896 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.399161] env[62974]: DEBUG oslo_vmware.api [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654663, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.401709] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Resuming the VM {{(pid=62974) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 907.401709] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6187bc5f-268f-40cf-800b-7e34081aef82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.408042] env[62974]: DEBUG oslo_vmware.api [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 907.408042] env[62974]: value = "task-2654664" [ 907.408042] env[62974]: _type = "Task" [ 907.408042] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.418807] env[62974]: DEBUG oslo_vmware.api [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654664, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.434400] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654661, 'name': Rename_Task, 'duration_secs': 0.218618} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.434669] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.434920] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f3af2b8-2952-4dea-af02-64ff7978558a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.441669] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 907.441669] env[62974]: value = "task-2654665" [ 907.441669] env[62974]: _type = "Task" [ 907.441669] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.451697] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654665, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.542546] env[62974]: DEBUG oslo_vmware.api [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654662, 'name': PowerOnVM_Task, 'duration_secs': 0.413733} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.542827] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.543097] env[62974]: DEBUG nova.compute.manager [None req-11c934cb-f595-40c0-be22-ca8f186e85fd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.543889] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0eebd71-e67f-49dc-bf3c-14b29002e1f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.632870] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.632870] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654660, 'name': CreateSnapshot_Task, 'duration_secs': 0.632638} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.632870] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cf051b6-f264-43b4-8bc3-92f7e4384170 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.635204] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 907.636225] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bcba47-f44a-49f7-9325-cc1eff5749a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.649548] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 907.649548] env[62974]: value = "task-2654666" [ 907.649548] env[62974]: _type = "Task" [ 907.649548] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.659813] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 907.660118] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 907.660313] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 907.662634] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c1055e-8bd6-4c8e-9adc-7f3948f9c650 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.693123] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be74b988-1244-4b54-92d5-7cbfc2df26ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.702517] env[62974]: WARNING nova.virt.vmwareapi.driver [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 907.702938] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.704213] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce8be22-f277-477c-aa86-ec1235f3cf08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.713849] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Unregistering the VM {{(pid=62974) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.714250] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d18ff726-4f1c-4192-b48c-ce49f3c549e4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.794293] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.794565] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.794750] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleting the datastore file [datastore2] 3df97cea-5a6e-4d7a-b2f3-e02213816e24 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.795045] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0171285-2c2e-4adf-afa0-a6b8be48e508 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.802310] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 907.802310] env[62974]: value = "task-2654668" [ 907.802310] env[62974]: _type = "Task" [ 907.802310] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.815347] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654668, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.888054] env[62974]: DEBUG nova.scheduler.client.report [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.909675] env[62974]: DEBUG oslo_vmware.api [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654663, 'name': PowerOffVM_Task, 'duration_secs': 0.22662} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.913486] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.913764] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.914766] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04523ef1-1f46-4808-9ab8-2e84a2564700 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.926712] env[62974]: DEBUG oslo_vmware.api [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654664, 'name': PowerOnVM_Task} progress is 93%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.958633] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654665, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.980597] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.981200] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.981504] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Deleting the datastore file [datastore1] eb8647c7-f5e1-4de5-8321-9a9ecff5961c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.981828] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4101081-5e18-48da-883f-e5d727853990 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.989135] env[62974]: DEBUG oslo_vmware.api [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 907.989135] env[62974]: value = "task-2654670" [ 907.989135] env[62974]: _type = "Task" [ 907.989135] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.998564] env[62974]: DEBUG oslo_vmware.api [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654670, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.160702] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 908.161679] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-96ac5edd-f069-4887-9274-45216971943c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.171490] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 908.171490] env[62974]: value = "task-2654671" [ 908.171490] env[62974]: _type = "Task" [ 908.171490] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.182796] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654671, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.313485] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654668, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.397154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.397565] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.099s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.398783] env[62974]: DEBUG nova.objects.instance [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lazy-loading 'resources' on Instance uuid 12c769fb-8c9e-4089-9563-232cfad89b21 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.420664] env[62974]: DEBUG oslo_vmware.api [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654664, 'name': PowerOnVM_Task, 'duration_secs': 0.662349} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.421708] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Resumed the VM {{(pid=62974) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 908.421924] env[62974]: DEBUG nova.compute.manager [None req-6f09d33b-b002-4eed-a7bc-57811f50d1af tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 908.422788] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c76a4a0-17ac-4d5a-aeb8-2f1f7123827c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.439534] env[62974]: INFO nova.scheduler.client.report [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted allocations for instance 70adaccf-44ab-44b1-ac8a-005d42c09f0a [ 908.452830] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654665, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.501363] env[62974]: DEBUG oslo_vmware.api [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654670, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264348} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.501628] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.501812] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.501985] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.502755] env[62974]: INFO nova.compute.manager [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 908.503029] env[62974]: DEBUG oslo.service.loopingcall [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.503479] env[62974]: DEBUG nova.compute.manager [-] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.503579] env[62974]: DEBUG nova.network.neutron [-] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.682425] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654671, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.821078] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.549236} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.821078] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.821078] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.821078] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.953354] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f0e2d96d-21bd-4e98-b3f4-9affac77ebfd tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "70adaccf-44ab-44b1-ac8a-005d42c09f0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.350s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.959277] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654665, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.190321] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654671, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.325232] env[62974]: INFO nova.virt.block_device [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Booting with volume 13787642-ed9f-449c-b672-b1b3b50942b0 at /dev/sdb [ 909.353801] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ec1799-dabe-40c9-8b8f-a0f94673a90d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.370935] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa01592-3bc1-4074-a312-97efc23cd4a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.411007] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47a388d-6672-4ee8-b742-c2bb3e328911 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.414616] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aea4194d-21c2-485b-a061-499eebe55aa3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.417940] env[62974]: DEBUG nova.compute.manager [req-80c57bb9-e44b-4894-b7b8-b3510b1e0ba3 req-c6d2e385-2e76-4a37-ba03-e5f24432a4a3 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Received event network-vif-deleted-b3dc1eb5-896e-4ba2-a50d-0626b0e5f490 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 909.418062] env[62974]: INFO nova.compute.manager [req-80c57bb9-e44b-4894-b7b8-b3510b1e0ba3 req-c6d2e385-2e76-4a37-ba03-e5f24432a4a3 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Neutron deleted interface b3dc1eb5-896e-4ba2-a50d-0626b0e5f490; detaching it from the instance and deleting it from the info cache [ 909.418162] env[62974]: DEBUG nova.network.neutron [req-80c57bb9-e44b-4894-b7b8-b3510b1e0ba3 req-c6d2e385-2e76-4a37-ba03-e5f24432a4a3 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.426769] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682d42e8-3887-422d-9ab5-a7fcafe5d589 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.435101] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8518942d-771e-4e3a-89ae-45d2c2bda50b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.459099] env[62974]: DEBUG nova.compute.provider_tree [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 
tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.478907] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53b07431-cbc0-4181-bef1-12e928b897e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.480848] env[62974]: DEBUG oslo_vmware.api [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654665, 'name': PowerOnVM_Task, 'duration_secs': 1.656621} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.482476] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.482597] env[62974]: INFO nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Took 10.26 seconds to spawn the instance on the hypervisor. [ 909.482760] env[62974]: DEBUG nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 909.486752] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9763813-eabf-4d4d-bcb7-a4d1a54f8a61 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.494947] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0221bb61-5011-42dd-802b-7f1c436f0b75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.536810] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98239031-807f-40c9-9495-9894b5d29110 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.543523] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f914bda4-916d-4cb6-9123-8776e3708b75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.558231] env[62974]: DEBUG nova.virt.block_device [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updating existing volume attachment record: 827ea418-429a-4d15-b85f-5af987ee48a2 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 909.608109] env[62974]: DEBUG 
nova.network.neutron [-] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.682346] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654671, 'name': CloneVM_Task} progress is 95%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.921645] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e5d9124-3e26-4c79-9e9c-37ca595850b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.932282] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9730af-8f87-4487-bf12-741340a16d05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.974495] env[62974]: DEBUG nova.scheduler.client.report [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.977373] env[62974]: DEBUG nova.compute.manager [req-80c57bb9-e44b-4894-b7b8-b3510b1e0ba3 req-c6d2e385-2e76-4a37-ba03-e5f24432a4a3 service nova] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Detach interface failed, port_id=b3dc1eb5-896e-4ba2-a50d-0626b0e5f490, reason: Instance eb8647c7-f5e1-4de5-8321-9a9ecff5961c could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 909.993224] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.993349] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.993489] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.994400] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.994400] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.998300] env[62974]: INFO nova.compute.manager [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Terminating instance [ 910.022209] env[62974]: INFO nova.compute.manager [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Took 28.14 seconds to build instance. [ 910.112883] env[62974]: INFO nova.compute.manager [-] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Took 1.61 seconds to deallocate network for instance. [ 910.187176] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654671, 'name': CloneVM_Task, 'duration_secs': 1.880536} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.187557] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Created linked-clone VM from snapshot [ 910.188301] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc948e80-ab07-443f-90ed-a2b3b332cba6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.198962] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Uploading image b0128a71-8a8e-451d-82d7-854613873b6a {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 910.226579] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 910.226579] env[62974]: value = "vm-535431" [ 910.226579] env[62974]: _type = "VirtualMachine" [ 910.226579] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 910.226918] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7a66fb79-6ba9-4678-995e-4e9fdfcd6550 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.241276] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease: (returnval){ [ 910.241276] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4be6e-36a0-55c7-d18c-40d7d22f6155" [ 910.241276] env[62974]: _type = "HttpNfcLease" [ 910.241276] env[62974]: } obtained for exporting VM: (result){ [ 910.241276] env[62974]: value = "vm-535431" [ 910.241276] env[62974]: _type = "VirtualMachine" [ 910.241276] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 910.241276] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the lease: (returnval){ [ 910.241276] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4be6e-36a0-55c7-d18c-40d7d22f6155" [ 910.241276] env[62974]: _type = "HttpNfcLease" [ 910.241276] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 910.247096] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 910.247096] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4be6e-36a0-55c7-d18c-40d7d22f6155" [ 910.247096] env[62974]: _type = "HttpNfcLease" [ 910.247096] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 910.437780] env[62974]: DEBUG nova.compute.manager [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Received event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 910.437986] env[62974]: DEBUG nova.compute.manager [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing instance network info cache due to event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 910.438289] env[62974]: DEBUG oslo_concurrency.lockutils [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] Acquiring lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.438502] env[62974]: DEBUG oslo_concurrency.lockutils [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] Acquired lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.439254] env[62974]: DEBUG nova.network.neutron [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 910.479270] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.482456] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.149s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.482456] env[62974]: DEBUG nova.objects.instance [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lazy-loading 'resources' on Instance uuid a14e7e40-afef-4607-8fa9-935a92ea49dc {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 910.507224] env[62974]: DEBUG nova.compute.manager [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.507224] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.507224] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b722c43-27f0-4959-b13d-20f7ac15d0ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.509829] env[62974]: INFO nova.scheduler.client.report [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Deleted allocations for instance 12c769fb-8c9e-4089-9563-232cfad89b21 [ 910.518685] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.519130] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efac470f-5273-4582-be4a-f73eee56fcd1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.524329] env[62974]: DEBUG oslo_concurrency.lockutils [None req-84ede54e-05ef-410a-9dc0-56206d850fcd tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.654s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.525790] env[62974]: DEBUG oslo_vmware.api [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 910.525790] env[62974]: value = "task-2654673" [ 910.525790] env[62974]: _type = "Task" [ 910.525790] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.534566] env[62974]: DEBUG oslo_vmware.api [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654673, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.621751] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.748928] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 910.748928] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4be6e-36a0-55c7-d18c-40d7d22f6155" [ 910.748928] env[62974]: _type = "HttpNfcLease" [ 910.748928] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 910.749262] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 910.749262] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4be6e-36a0-55c7-d18c-40d7d22f6155" [ 910.749262] env[62974]: _type = "HttpNfcLease" [ 910.749262] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 910.750203] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8639db74-1c58-4919-9895-0f78bced1b9b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.758625] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d79ad0-db73-a3cf-1377-390da8b11252/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 910.758777] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d79ad0-db73-a3cf-1377-390da8b11252/disk-0.vmdk for reading. 
{{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 910.939993] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fcd9ffc0-d525-49d3-8d02-1530b142e759 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.022645] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c2bec54f-103f-45db-8348-19ea3e44b0a3 tempest-AttachInterfacesUnderV243Test-1852418656 tempest-AttachInterfacesUnderV243Test-1852418656-project-member] Lock "12c769fb-8c9e-4089-9563-232cfad89b21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.790s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.048237] env[62974]: DEBUG oslo_vmware.api [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654673, 'name': PowerOffVM_Task, 'duration_secs': 0.363161} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.048382] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.050285] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.050285] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84a9d6f2-52c1-4fbb-ae79-b7fc36ff47c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.117718] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.118162] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.119155] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleting the datastore file [datastore2] 14523914-68ab-4d39-8eb8-6a786ddcb4dc {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.119155] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e33780b-0c1c-4f79-aef6-d6b8c651fe66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.128782] env[62974]: DEBUG 
oslo_vmware.api [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 911.128782] env[62974]: value = "task-2654675" [ 911.128782] env[62974]: _type = "Task" [ 911.128782] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.144763] env[62974]: DEBUG oslo_vmware.api [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.176483] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.177206] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.337975] env[62974]: DEBUG nova.network.neutron [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updated VIF entry in instance network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 911.338470] env[62974]: DEBUG nova.network.neutron [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f4c134a-f0", "ovs_interfaceid": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.415302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ec0294-fe19-4a85-8967-494f3937d9f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.424212] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8a56d0-42f4-419b-bd62-b7f25bee5fd3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.456452] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d49c918-81e3-4fe4-ab53-a9bf655791a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.463923] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f979f7-d7be-41fa-9cb7-0e34801786fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.480056] env[62974]: DEBUG nova.compute.provider_tree [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.642822] env[62974]: DEBUG oslo_vmware.api [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23701} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.642930] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.643108] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.643368] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.643478] env[62974]: INFO nova.compute.manager [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 911.643793] env[62974]: DEBUG oslo.service.loopingcall [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.643982] env[62974]: DEBUG nova.compute.manager [-] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.644200] env[62974]: DEBUG nova.network.neutron [-] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.682855] env[62974]: DEBUG nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 911.726969] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.728441] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.728441] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.728548] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.729205] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.729205] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.729205] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.729463] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.729499] env[62974]: DEBUG 
nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.729677] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.730030] env[62974]: DEBUG nova.virt.hardware [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.731179] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6a9366-c38f-49fc-b36e-2590f3efbdb9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.740848] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4405594-5f51-40d2-8223-3cd287ce9bc6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.765600] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:ba:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1461ee04-30d1-4afa-b41b-26e9ea0dc08f', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.773466] env[62974]: DEBUG oslo.service.loopingcall [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.774349] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.778019] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ebf2007-b820-4ddf-93f2-b342671adfae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.803025] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.803025] env[62974]: value = "task-2654676" [ 911.803025] env[62974]: _type = "Task" [ 911.803025] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.810043] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654676, 'name': CreateVM_Task} progress is 0%. 
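[annotation] The nova.virt.hardware entries above compute CPU topologies for a 1-vCPU flavor with no explicit limits and end with a single candidate, (sockets=1, cores=1, threads=1). The toy enumeration below illustrates the idea only (factorisations of the vCPU count filtered by maxima); the defaults and the absence of any preference handling are assumptions, not Nova's real constraint logic.

    from itertools import product
    from typing import NamedTuple

    class VirtCPUTopology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus: int,
                            max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536) -> list[VirtCPUTopology]:
        """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
        topologies = []
        for s, c, t in product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= max_sockets
                    and c <= max_cores and t <= max_threads):
                topologies.append(VirtCPUTopology(s, c, t))
        return topologies

    # For vcpus=1 this yields exactly one topology, matching the
    # "Got 1 possible topologies" line in the log.
    print(possible_topologies(1))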
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.842610] env[62974]: DEBUG oslo_concurrency.lockutils [req-94973827-e9b1-4628-bd47-6dae25e46eb2 req-295f7ba5-ded4-4556-aba3-ec05f2b19e57 service nova] Releasing lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.983773] env[62974]: DEBUG nova.scheduler.client.report [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.044425] env[62974]: DEBUG nova.compute.manager [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.046309] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5524509-f33a-47b6-936a-16d671e56cfa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.213488] env[62974]: DEBUG nova.compute.manager [req-f90e4820-28f1-4030-859f-c0bcbd00b5b0 req-049de169-c867-44f5-9bc9-2b68a9339c8e service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Received event network-vif-deleted-e66d1ea1-70df-427f-8578-45c959a08ad6 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 912.217184] env[62974]: INFO nova.compute.manager [req-f90e4820-28f1-4030-859f-c0bcbd00b5b0 req-049de169-c867-44f5-9bc9-2b68a9339c8e service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Neutron deleted interface e66d1ea1-70df-427f-8578-45c959a08ad6; detaching it from the instance and deleting it from the info cache [ 912.217362] env[62974]: DEBUG nova.network.neutron [req-f90e4820-28f1-4030-859f-c0bcbd00b5b0 req-049de169-c867-44f5-9bc9-2b68a9339c8e service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.220471] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.313205] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654676, 'name': CreateVM_Task, 'duration_secs': 0.427517} completed successfully. 
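[annotation] Several report-client entries in this section state that inventory "has not changed" for provider bd3bd9ae-... based on the VCPU/MEMORY_MB/DISK_GB data shown. The decision amounts to comparing the freshly computed inventory with the copy cached in the provider tree and skipping the placement update when they match; the snippet below is a sketch under that assumption, not the scheduler report client itself.

    def inventory_changed(cached: dict[str, dict], computed: dict[str, dict]) -> bool:
        """Return True only if any resource class or field differs."""
        return cached != computed

    cached = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 120,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    # Recomputing identical values means no update call is needed, which is
    # what the "Inventory has not changed" lines record.
    assert not inventory_changed(cached, dict(cached))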
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.313520] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.314702] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.315254] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.315869] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 912.316355] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5a4b67d-180f-4e27-883f-db3c8ab4308c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.323027] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 912.323027] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52aebd2d-f910-96b5-90d7-8614c38da35d" [ 912.323027] env[62974]: _type = "Task" [ 912.323027] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.335243] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52aebd2d-f910-96b5-90d7-8614c38da35d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.475654] env[62974]: DEBUG nova.compute.manager [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Received event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 912.475654] env[62974]: DEBUG nova.compute.manager [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing instance network info cache due to event network-changed-1f4c134a-f095-4872-9ffc-8b90d02f29f9. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 912.475654] env[62974]: DEBUG oslo_concurrency.lockutils [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] Acquiring lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.475654] env[62974]: DEBUG oslo_concurrency.lockutils [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] Acquired lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.475794] env[62974]: DEBUG nova.network.neutron [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Refreshing network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 912.491167] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.495410] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.661s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.495410] env[62974]: DEBUG nova.objects.instance [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lazy-loading 'resources' on Instance uuid b3827c67-9075-4a53-9f9e-8651e3f4b211 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.522518] env[62974]: INFO nova.scheduler.client.report [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Deleted allocations for instance a14e7e40-afef-4607-8fa9-935a92ea49dc [ 912.563759] env[62974]: INFO nova.compute.manager [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] instance snapshotting [ 912.569434] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb83a5c-df98-4bab-8fc8-10d00f27f96f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.595040] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e939e4f-7212-4691-a4f1-c150c87afd70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.686229] env[62974]: DEBUG nova.network.neutron [-] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Updating instance_info_cache with 
network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.722394] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c9bc675-8f1e-4991-8031-3a84ce914652 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.732955] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514a2be8-6e46-4db7-b6c7-6e03800e6f33 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.767960] env[62974]: DEBUG nova.compute.manager [req-f90e4820-28f1-4030-859f-c0bcbd00b5b0 req-049de169-c867-44f5-9bc9-2b68a9339c8e service nova] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Detach interface failed, port_id=e66d1ea1-70df-427f-8578-45c959a08ad6, reason: Instance 14523914-68ab-4d39-8eb8-6a786ddcb4dc could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 912.841604] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52aebd2d-f910-96b5-90d7-8614c38da35d, 'name': SearchDatastore_Task, 'duration_secs': 0.019251} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.841991] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.842288] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.842580] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.842791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.843015] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.844152] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a54bbd5f-4e97-41a3-a0b7-184bb3e61137 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.855136] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.855362] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.856664] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec4ddc23-9e3b-482f-806f-99e4e4e5bdda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.863481] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 912.863481] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ae7a1a-d151-e1f2-8d8b-d73b32749dcc" [ 912.863481] env[62974]: _type = "Task" [ 912.863481] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.877173] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ae7a1a-d151-e1f2-8d8b-d73b32749dcc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.035911] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b850fe74-be8b-42ae-82d5-87871e8544e0 tempest-ServersTestBootFromVolume-77464477 tempest-ServersTestBootFromVolume-77464477-project-member] Lock "a14e7e40-afef-4607-8fa9-935a92ea49dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.923s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.107553] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 913.109858] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3da5bb2b-1582-4a0c-a44a-6c303eb9c965 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.120197] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 913.120197] env[62974]: value = "task-2654677" [ 913.120197] env[62974]: _type = "Task" [ 913.120197] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.134263] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654677, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.190583] env[62974]: INFO nova.compute.manager [-] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Took 1.55 seconds to deallocate network for instance. [ 913.376375] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ae7a1a-d151-e1f2-8d8b-d73b32749dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.013868} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.380276] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3e4b706-85c3-48b6-ad97-eb041588645d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.391225] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 913.391225] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520b3fee-81df-0950-2239-ff5754ed9a4b" [ 913.391225] env[62974]: _type = "Task" [ 913.391225] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.401192] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520b3fee-81df-0950-2239-ff5754ed9a4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.461180] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fab91a7-71c5-40ba-851e-57af3e1286c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.469908] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d589c10-c543-4e73-b3d3-9b68abeced13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.510455] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ef68db-6b86-47f9-ac4a-719b65d03ced {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.522162] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10cce93-586f-49bf-bb9a-8f6edcf55382 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.536819] env[62974]: DEBUG nova.compute.provider_tree [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.630586] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654677, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.672958] env[62974]: DEBUG nova.network.neutron [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updated VIF entry in instance network info cache for port 1f4c134a-f095-4872-9ffc-8b90d02f29f9. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.673409] env[62974]: DEBUG nova.network.neutron [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [{"id": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "address": "fa:16:3e:a4:3a:b7", "network": {"id": "5c2a6ae1-988c-454d-8e07-5d7a1409a818", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1746007152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8152f704e86645a0a7e7e81d9edabf30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f4c134a-f0", "ovs_interfaceid": "1f4c134a-f095-4872-9ffc-8b90d02f29f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.701509] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.906664] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520b3fee-81df-0950-2239-ff5754ed9a4b, 'name': SearchDatastore_Task, 'duration_secs': 0.013564} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.907885] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.907885] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.908214] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77be8626-5925-4222-8788-0c9ec1ddd202 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.915990] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 913.915990] env[62974]: value = "task-2654678" [ 913.915990] env[62974]: _type = "Task" [ 913.915990] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.929879] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654678, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.040460] env[62974]: DEBUG nova.scheduler.client.report [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 914.131501] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654677, 'name': CreateSnapshot_Task, 'duration_secs': 0.761501} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.131871] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 914.132772] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c4d56e-c349-4d0f-8171-b3f239a4d798 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.176074] env[62974]: DEBUG oslo_concurrency.lockutils [req-40e2dd57-e21e-41f5-859d-6fcfc356f100 req-080b805e-40e9-47f6-b850-37fca71a945f service nova] Releasing lock "refresh_cache-e23dbff7-d23e-4909-9b33-67ed15c325e7" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.426178] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654678, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.548173] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.552941] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.961s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.581330] env[62974]: INFO nova.scheduler.client.report [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Deleted allocations for instance b3827c67-9075-4a53-9f9e-8651e3f4b211 [ 914.653871] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 914.655419] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-33da6100-224d-4107-bba5-be71b5d52275 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.666957] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 914.666957] env[62974]: value = "task-2654679" [ 
914.666957] env[62974]: _type = "Task" [ 914.666957] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.678258] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654679, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.928137] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654678, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595662} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.928534] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 914.928798] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 914.929111] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57928cea-b3ee-4390-83c0-ade428b72a57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.935638] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 914.935638] env[62974]: value = "task-2654680" [ 914.935638] env[62974]: _type = "Task" [ 914.935638] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.946255] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654680, 'name': ExtendVirtualDisk_Task} progress is 0%. 
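[annotation] The instance-snapshotting entries for 226f3328-... (CreateSnapshot_Task, then "Creating linked-clone VM from snapshot" and a CloneVM_Task polled at 0% and 94%) describe a two-step capture: snapshot the running VM, then clone from that snapshot so the subsequent image export reads from the clone rather than the live disks. A schematic of that ordering with hypothetical task-submitting callables; this is not the vmwareapi driver's actual code.

    from typing import Callable

    def capture_linked_clone(vm_ref: str,
                             create_snapshot: Callable[[str], str],
                             clone_from_snapshot: Callable[[str], str],
                             wait_for: Callable[[str], None]) -> None:
        """Snapshot a VM, then clone from the snapshot, in the log's task order."""
        wait_for(create_snapshot(vm_ref))      # CreateSnapshot_Task ... completed successfully
        wait_for(clone_from_snapshot(vm_ref))  # CloneVM_Task for the linked clone
        # The clone is what later gets exported as the image, leaving the
        # original VM's disks untouched during the transfer.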
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.060513] env[62974]: INFO nova.compute.claims [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.094723] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7c97487a-bc87-4b70-8119-dafe4b3e763a tempest-ServersAdminTestJSON-699970003 tempest-ServersAdminTestJSON-699970003-project-member] Lock "b3827c67-9075-4a53-9f9e-8651e3f4b211" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.907s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.180197] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654679, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.449425] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654680, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094521} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.449784] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 915.450642] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6f945a-d9ca-4ca7-8441-7b6630dfa56f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.474672] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 915.475030] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebd43a86-6dd5-445a-90a4-d567979d7619 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.494972] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 915.494972] env[62974]: value = "task-2654681" [ 915.494972] env[62974]: _type = "Task" [ 915.494972] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.503420] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654681, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.568336] env[62974]: INFO nova.compute.resource_tracker [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating resource usage from migration 72f105a4-6bb5-4b6d-9659-0904cb1114d8 [ 915.681940] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654679, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.939489] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3c85b5-1d68-4117-a76b-2e8962fe35aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.948263] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8119f8-c15f-42e6-9a2c-96186f2952a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.986812] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e0df92-6b2a-4ade-9a35-861fd32e5d80 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.995298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1904db57-37bc-4a0d-958e-5443e3acda79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.032299] env[62974]: DEBUG nova.compute.provider_tree [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.034384] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654681, 'name': ReconfigVM_Task, 'duration_secs': 0.342116} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.034851] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 3df97cea-5a6e-4d7a-b2f3-e02213816e24/3df97cea-5a6e-4d7a-b2f3-e02213816e24.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.036230] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'device_type': 'disk', 'boot_index': 0, 'encrypted': False, 'size': 0, 'encryption_options': None, 'encryption_secret_uuid': None, 'disk_bus': None, 'encryption_format': None, 'device_name': '/dev/sda', 'image_id': '807f8582-499f-47ee-9d5b-755c9f39bc39'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'mount_device': '/dev/sdb', 'guest_format': None, 'device_type': None, 'boot_index': None, 'attachment_id': '827ea418-429a-4d15-b85f-5af987ee48a2', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62974) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 916.036435] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 916.036622] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 916.037955] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9381fa3b-0272-4ab0-8272-3718a23ca2ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.055973] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5890b5b9-4de1-4169-ae83-6611f51c0d2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.082088] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] volume-13787642-ed9f-449c-b672-b1b3b50942b0/volume-13787642-ed9f-449c-b672-b1b3b50942b0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.082859] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a316918-3f17-4950-ad98-40d4a6608d73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.101339] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 916.101339] env[62974]: value = "task-2654682" [ 916.101339] env[62974]: _type = "Task" [ 916.101339] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.110266] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.179164] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654679, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.518476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.518476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.518476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.518476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.518798] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.519963] env[62974]: INFO nova.compute.manager [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Terminating instance [ 916.536160] env[62974]: DEBUG nova.scheduler.client.report [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.611007] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 
tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654682, 'name': ReconfigVM_Task, 'duration_secs': 0.316696} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.611331] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfigured VM instance instance-0000004a to attach disk [datastore1] volume-13787642-ed9f-449c-b672-b1b3b50942b0/volume-13787642-ed9f-449c-b672-b1b3b50942b0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.616015] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b01a1b9-0fad-4801-a2fc-c562f8936626 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.631789] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 916.631789] env[62974]: value = "task-2654683" [ 916.631789] env[62974]: _type = "Task" [ 916.631789] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.640558] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654683, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.678095] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654679, 'name': CloneVM_Task, 'duration_secs': 1.624146} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.678421] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Created linked-clone VM from snapshot [ 916.679308] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2310eb5b-d569-4492-bace-dc71bc83bddb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.689289] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Uploading image 9c51676c-8ad4-426e-8dd6-08396b1f4950 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 916.703208] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 916.703528] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-62325a57-a768-4b41-a587-4ae6496967c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.711122] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 916.711122] env[62974]: value = "task-2654684" [ 916.711122] env[62974]: _type = "Task" [ 916.711122] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.722146] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654684, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.025396] env[62974]: DEBUG nova.compute.manager [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 917.025396] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.026140] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86f0a2c-6ab2-4aad-a7c0-9cfbfea9166e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.034495] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.036260] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50478813-b1b3-4755-93e6-541095b03159 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.042014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.489s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.042570] env[62974]: INFO nova.compute.manager [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Migrating [ 917.059775] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.490s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.059775] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.059775] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 917.059775] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.586s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.061636] env[62974]: INFO nova.compute.claims [None 
req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.065029] env[62974]: DEBUG oslo_vmware.api [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 917.065029] env[62974]: value = "task-2654685" [ 917.065029] env[62974]: _type = "Task" [ 917.065029] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.068068] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb9dcfa-33fc-4eef-842f-9d1ccacd8b9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.091512] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4b884a-a5f3-4894-8235-a93c908576b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.096937] env[62974]: DEBUG oslo_vmware.api [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.111199] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd14dd66-c11e-467b-9560-1873bd837f15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.120209] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34a08e8-3081-4a30-bae9-8f5a65a5566f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.158046] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177630MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 917.158384] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.168508] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654683, 'name': ReconfigVM_Task, 'duration_secs': 0.299437} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.168871] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 917.169484] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-356d7b96-bb79-48e0-8483-95b33615016c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.177681] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 917.177681] env[62974]: value = "task-2654686" [ 917.177681] env[62974]: _type = "Task" [ 917.177681] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.187326] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654686, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.222850] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654684, 'name': Destroy_Task} progress is 33%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.584191] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.584191] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.584497] env[62974]: DEBUG nova.network.neutron [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.589436] env[62974]: DEBUG oslo_vmware.api [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654685, 'name': PowerOffVM_Task, 'duration_secs': 0.223916} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.590611] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.590780] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.591500] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d79951fd-8cd2-4925-b87a-81b2ff27f4e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.655354] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.655644] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.656365] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 
tempest-ServersNegativeTestJSON-1132372439-project-member] Deleting the datastore file [datastore2] 366b5816-a847-48d1-ad03-5758e473a9d0 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.656365] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fefbc998-0c82-4aee-b63d-46abba2a279c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.664896] env[62974]: DEBUG oslo_vmware.api [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for the task: (returnval){ [ 917.664896] env[62974]: value = "task-2654688" [ 917.664896] env[62974]: _type = "Task" [ 917.664896] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.673423] env[62974]: DEBUG oslo_vmware.api [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.689116] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654686, 'name': Rename_Task, 'duration_secs': 0.320971} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.690196] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.690196] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46e14b2e-d253-4887-86aa-126bfed4e1b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.696025] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 917.696025] env[62974]: value = "task-2654689" [ 917.696025] env[62974]: _type = "Task" [ 917.696025] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.706854] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654689, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.723893] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654684, 'name': Destroy_Task, 'duration_secs': 0.576153} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.724201] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Destroyed the VM [ 917.724611] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 917.724878] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ae55dc83-a1fc-4f4c-ab1b-0833853e6fcd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.733042] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 917.733042] env[62974]: value = "task-2654690" [ 917.733042] env[62974]: _type = "Task" [ 917.733042] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.742787] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654690, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.184207] env[62974]: DEBUG oslo_vmware.api [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Task: {'id': task-2654688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26132} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.184373] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.184469] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.186443] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.186443] env[62974]: INFO nova.compute.manager [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 918.186443] env[62974]: DEBUG oslo.service.loopingcall [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.186443] env[62974]: DEBUG nova.compute.manager [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.186443] env[62974]: DEBUG nova.network.neutron [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.210661] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654689, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.248940] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654690, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.510738] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e95c5e7-89f0-438c-b24e-87aa2692d1de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.520386] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5697fbb-3aef-44d6-a5bd-97d6cfe3fd54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.558654] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d852394-46fa-4987-8912-ce26351fa6eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.567065] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21cb2e4-1555-4e78-b188-939ccf5d3906 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.581446] env[62974]: DEBUG nova.compute.provider_tree [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.654848] env[62974]: DEBUG nova.network.neutron [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance_info_cache with network_info: [{"id": "39690695-af5c-4491-9d0f-b5ea691ce54f", "address": "fa:16:3e:1c:c5:83", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39690695-af", "ovs_interfaceid": "39690695-af5c-4491-9d0f-b5ea691ce54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.708427] env[62974]: DEBUG oslo_vmware.api [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654689, 'name': PowerOnVM_Task, 'duration_secs': 0.562292} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.708777] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.709083] env[62974]: DEBUG nova.compute.manager [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.710128] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150cfbe3-3078-42c9-a54c-c6764e4410e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.746613] env[62974]: DEBUG oslo_vmware.api [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654690, 'name': RemoveSnapshot_Task, 'duration_secs': 0.998001} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.748438] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 918.959927] env[62974]: DEBUG nova.compute.manager [req-581132b9-a2eb-4583-9df2-bee9bfdab534 req-a952c166-f259-4c5a-a280-8d4a4eabea22 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Received event network-vif-deleted-07b0aa8b-b38d-489b-9998-6efe6126083f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 918.959927] env[62974]: INFO nova.compute.manager [req-581132b9-a2eb-4583-9df2-bee9bfdab534 req-a952c166-f259-4c5a-a280-8d4a4eabea22 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Neutron deleted interface 07b0aa8b-b38d-489b-9998-6efe6126083f; detaching it from the instance and deleting it from the info cache [ 918.961735] env[62974]: DEBUG nova.network.neutron [req-581132b9-a2eb-4583-9df2-bee9bfdab534 req-a952c166-f259-4c5a-a280-8d4a4eabea22 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.090616] env[62974]: DEBUG nova.scheduler.client.report [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.160549] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.240873] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.252273] env[62974]: WARNING nova.compute.manager [None req-83dd7183-85db-4621-92a0-6d138312d755 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Image not found during snapshot: nova.exception.ImageNotFound: Image 9c51676c-8ad4-426e-8dd6-08396b1f4950 could not be found. [ 919.272342] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d79ad0-db73-a3cf-1377-390da8b11252/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 919.273832] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dd01f1-4c84-470d-acd9-eb200ee0a53d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.284262] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d79ad0-db73-a3cf-1377-390da8b11252/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 919.285562] env[62974]: ERROR oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d79ad0-db73-a3cf-1377-390da8b11252/disk-0.vmdk due to incomplete transfer. [ 919.285562] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-36824786-033c-40a6-9c6e-804dcbc85646 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.292817] env[62974]: DEBUG oslo_vmware.rw_handles [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d79ad0-db73-a3cf-1377-390da8b11252/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 919.293158] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Uploaded image b0128a71-8a8e-451d-82d7-854613873b6a to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 919.295705] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 919.295975] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8605c557-2e2d-4d59-94d6-598d4d8fe500 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.303054] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 919.303054] env[62974]: value = "task-2654691" [ 919.303054] env[62974]: _type = "Task" [ 919.303054] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.311897] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654691, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.347867] env[62974]: DEBUG nova.network.neutron [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.371638] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "ef54d01a-5d2c-448a-a060-37520de396ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.372922] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.469195] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61b5c01d-24fb-4c1d-b6f3-f90e24b12f21 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.480836] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a6efe9-4c36-4859-8126-0738e363a66a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.532714] env[62974]: DEBUG nova.compute.manager [req-581132b9-a2eb-4583-9df2-bee9bfdab534 req-a952c166-f259-4c5a-a280-8d4a4eabea22 service nova] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Detach interface failed, port_id=07b0aa8b-b38d-489b-9998-6efe6126083f, reason: Instance 366b5816-a847-48d1-ad03-5758e473a9d0 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 919.596426] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.596863] env[62974]: DEBUG nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 919.600768] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.928s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.601064] env[62974]: DEBUG nova.objects.instance [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lazy-loading 'resources' on Instance uuid 79448002-daa3-4afd-bd1b-36d734642a9e {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.819034] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654691, 'name': Destroy_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.852486] env[62974]: INFO nova.compute.manager [-] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Took 1.67 seconds to deallocate network for instance. [ 919.875578] env[62974]: DEBUG nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 920.112879] env[62974]: DEBUG nova.compute.utils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 920.119290] env[62974]: DEBUG nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 920.119290] env[62974]: DEBUG nova.network.neutron [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 920.204243] env[62974]: DEBUG nova.policy [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e07f9aeb1bcb40b0afdbab203e4b0210', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4f89fff8f1947ba86e9ecefa284b1fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 920.319812] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654691, 'name': Destroy_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.364571] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.407571] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.525421] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ff7a80-c4fb-437b-b163-f298cd82f438 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.533779] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da0cc92-a118-401c-8f31-dc38f44520db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.573330] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2605a61-73b8-4f80-aaff-66a97e0aa305 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.582373] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d87244-4c5b-4b50-87b5-a8af17003803 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.600619] env[62974]: DEBUG nova.compute.provider_tree [None 
req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.622029] env[62974]: DEBUG nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 920.678370] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9eedbd-1448-4630-9d2b-c96ed50d4021 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.696761] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance 'e11408df-466c-4101-b0cc-3621cda78a45' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 920.793983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.794499] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.794726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.794946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.795079] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock 
"226f3328-e3b1-4ae1-8b7c-349b552cf5a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.797086] env[62974]: INFO nova.compute.manager [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Terminating instance [ 920.813670] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654691, 'name': Destroy_Task, 'duration_secs': 1.380763} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.814128] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Destroyed the VM [ 920.814939] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 920.816224] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1fd7a089-e9fc-43a2-9db9-2729d2cfbe8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.824561] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 920.824561] env[62974]: value = "task-2654692" [ 920.824561] env[62974]: _type = "Task" [ 920.824561] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.836377] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654692, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.105110] env[62974]: DEBUG nova.scheduler.client.report [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.182309] env[62974]: DEBUG nova.network.neutron [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Successfully created port: 09eaa85f-9e94-4988-9a61-7595a0fbe90c {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 921.202876] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.203220] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce5d8cf1-fe06-49e8-86da-27dc907ce956 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.211242] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 921.211242] env[62974]: value = "task-2654693" [ 921.211242] env[62974]: _type = "Task" [ 921.211242] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.223893] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.305016] env[62974]: DEBUG nova.compute.manager [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.305156] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.309024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0593fc-b5d0-4da4-abf6-cd74bfb85a5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.316463] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.317478] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd2dd515-8963-4270-acec-7da8756e51e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.324868] env[62974]: DEBUG oslo_vmware.api [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 921.324868] env[62974]: value = "task-2654694" [ 921.324868] env[62974]: _type = "Task" [ 921.324868] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.345588] env[62974]: DEBUG oslo_vmware.api [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.348228] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654692, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.603441] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.603589] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "18489c02-5958-431f-aede-f554d0d785ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.610334] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.615543] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.426s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.616589] env[62974]: DEBUG nova.objects.instance [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lazy-loading 'resources' on Instance uuid c08ed924-9b7d-4773-8e49-c57ecfb27d03 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.636661] env[62974]: DEBUG nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 921.652083] env[62974]: INFO nova.scheduler.client.report [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Deleted allocations for instance 79448002-daa3-4afd-bd1b-36d734642a9e [ 921.672031] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 921.672031] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.672031] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 921.672246] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.672314] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 921.672476] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 921.672738] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 921.673527] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b 
tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 921.673527] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 921.673527] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 921.673527] env[62974]: DEBUG nova.virt.hardware [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 921.674861] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b7b8f7-445c-4943-9a14-25bd2e29b893 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.684103] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd24569a-1668-4da3-9296-78191b860e82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.722069] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654693, 'name': PowerOffVM_Task, 'duration_secs': 0.348724} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.723053] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.723297] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance 'e11408df-466c-4101-b0cc-3621cda78a45' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 921.839092] env[62974]: DEBUG oslo_vmware.api [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654692, 'name': RemoveSnapshot_Task, 'duration_secs': 0.5418} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.842086] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 921.842348] env[62974]: INFO nova.compute.manager [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Took 15.77 seconds to snapshot the instance on the hypervisor. [ 921.844681] env[62974]: DEBUG oslo_vmware.api [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654694, 'name': PowerOffVM_Task, 'duration_secs': 0.261253} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.845457] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.845557] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.845801] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1b8b1d3-b47d-4a33-a998-25186ab41a43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.908096] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.908096] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.908096] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleting the datastore file [datastore2] 226f3328-e3b1-4ae1-8b7c-349b552cf5a2 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.908096] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4f07b92-b785-47c8-b569-ba954adff91c {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.914805] env[62974]: DEBUG oslo_vmware.api [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 921.914805] env[62974]: value = "task-2654696" [ 921.914805] env[62974]: _type = "Task" [ 921.914805] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.922789] env[62974]: DEBUG oslo_vmware.api [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654696, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.107060] env[62974]: DEBUG nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 922.162957] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b1983ca-ce84-4f9c-9d9e-a681cf89a9a7 tempest-AttachInterfacesV270Test-1970420271 tempest-AttachInterfacesV270Test-1970420271-project-member] Lock "79448002-daa3-4afd-bd1b-36d734642a9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.550s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.232540] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 922.232693] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 922.232844] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 922.232973] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 
{{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 922.233131] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 922.233282] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 922.233486] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 922.233640] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 922.233799] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 922.233958] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 922.234184] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 922.239574] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e8453eb-f0b3-411c-a2c2-985471d2a470 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.258469] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 922.258469] env[62974]: value = "task-2654697" [ 922.258469] env[62974]: _type = "Task" [ 922.258469] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.271081] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654697, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.397751] env[62974]: DEBUG nova.compute.manager [None req-18c74a1d-c954-4f1d-a5dc-7dca28e25eff tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Found 2 images (rotation: 2) {{(pid=62974) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 922.429537] env[62974]: DEBUG oslo_vmware.api [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129831} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.429673] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.430260] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.430260] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.430418] env[62974]: INFO nova.compute.manager [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 922.430663] env[62974]: DEBUG oslo.service.loopingcall [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.430877] env[62974]: DEBUG nova.compute.manager [-] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 922.430970] env[62974]: DEBUG nova.network.neutron [-] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.478794] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941bc2c0-c61b-4b76-b5b4-671529eaf701 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.489781] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04677f87-ffb5-4b1a-b567-29062ae5e379 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.532977] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb14a89-8303-40d3-a551-e8ff7989d49f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.542719] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdccf12-72ec-471c-9bc3-62e37fd64f45 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.560606] env[62974]: DEBUG nova.compute.provider_tree [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.641497] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.771902] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654697, 'name': ReconfigVM_Task, 'duration_secs': 0.305564} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.772297] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance 'e11408df-466c-4101-b0cc-3621cda78a45' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 922.945439] env[62974]: DEBUG nova.compute.manager [req-f3a3f990-ccef-4002-92d2-e694056bcba7 req-6ff43e9f-f0e6-4b1f-8b2e-53d7c0b42e35 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Received event network-vif-deleted-2a679a79-ea4e-44c9-8a79-e5088ad88d84 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 922.945708] env[62974]: INFO nova.compute.manager [req-f3a3f990-ccef-4002-92d2-e694056bcba7 req-6ff43e9f-f0e6-4b1f-8b2e-53d7c0b42e35 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Neutron deleted interface 2a679a79-ea4e-44c9-8a79-e5088ad88d84; detaching it from the instance and deleting it from the info cache [ 922.945904] env[62974]: DEBUG nova.network.neutron [req-f3a3f990-ccef-4002-92d2-e694056bcba7 req-6ff43e9f-f0e6-4b1f-8b2e-53d7c0b42e35 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.063837] env[62974]: DEBUG nova.scheduler.client.report [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.076192] env[62974]: DEBUG nova.network.neutron [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Successfully updated port: 09eaa85f-9e94-4988-9a61-7595a0fbe90c {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 923.188079] env[62974]: DEBUG nova.network.neutron [-] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.279996] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.280389] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.284027] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.284027] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.284027] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.284027] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.284027] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.284471] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 923.284471] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.284471] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.284471] env[62974]: DEBUG nova.virt.hardware [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 
tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.289709] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Reconfiguring VM instance instance-0000004f to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 923.290086] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de205d90-1f67-4ee0-921c-ffa571308130 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.314985] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 923.314985] env[62974]: value = "task-2654698" [ 923.314985] env[62974]: _type = "Task" [ 923.314985] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.323421] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654698, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.329786] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "92c80524-0fb6-4f28-9a72-bc4ab5793558" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.329786] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "92c80524-0fb6-4f28-9a72-bc4ab5793558" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.454184] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f59bace0-a65a-480c-bb33-ab50491ac6bc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.467328] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec92b956-bdf7-438a-a257-676cb5abf299 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.498239] env[62974]: DEBUG nova.compute.manager [req-f3a3f990-ccef-4002-92d2-e694056bcba7 req-6ff43e9f-f0e6-4b1f-8b2e-53d7c0b42e35 service nova] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Detach interface failed, port_id=2a679a79-ea4e-44c9-8a79-e5088ad88d84, reason: Instance 
226f3328-e3b1-4ae1-8b7c-349b552cf5a2 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 923.571919] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.956s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.575841] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.159s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.578347] env[62974]: INFO nova.compute.claims [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.580785] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "refresh_cache-32b17ff4-f7e1-498d-aef7-162f81cd5feb" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.580982] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquired lock "refresh_cache-32b17ff4-f7e1-498d-aef7-162f81cd5feb" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.581118] env[62974]: DEBUG nova.network.neutron [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.602960] env[62974]: INFO nova.scheduler.client.report [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Deleted allocations for instance c08ed924-9b7d-4773-8e49-c57ecfb27d03 [ 923.691210] env[62974]: INFO nova.compute.manager [-] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Took 1.26 seconds to deallocate network for instance. 
[ 923.710153] env[62974]: DEBUG nova.compute.manager [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.711091] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3716b2-d524-49a9-ba0f-8078d86ae141 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.828212] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654698, 'name': ReconfigVM_Task, 'duration_secs': 0.38373} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.831260] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Reconfigured VM instance instance-0000004f to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 923.831260] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35099e02-2248-4d9a-b6c3-371b994b9efe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.851323] env[62974]: DEBUG nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 923.862528] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] e11408df-466c-4101-b0cc-3621cda78a45/e11408df-466c-4101-b0cc-3621cda78a45.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 923.863418] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e770b2d-e053-40f1-a0bd-92e0ced883ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.887092] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 923.887092] env[62974]: value = "task-2654699" [ 923.887092] env[62974]: _type = "Task" [ 923.887092] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.896202] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654699, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.110276] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8d2ebe0-40d0-40af-9a68-77f06334762e tempest-ServerAddressesTestJSON-886822839 tempest-ServerAddressesTestJSON-886822839-project-member] Lock "c08ed924-9b7d-4773-8e49-c57ecfb27d03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.490s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.168058] env[62974]: DEBUG nova.network.neutron [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.200486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.225137] env[62974]: INFO nova.compute.manager [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] instance snapshotting [ 924.225749] env[62974]: DEBUG nova.objects.instance [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'flavor' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.388832] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.399238] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654699, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.400648] env[62974]: DEBUG nova.network.neutron [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Updating instance_info_cache with network_info: [{"id": "09eaa85f-9e94-4988-9a61-7595a0fbe90c", "address": "fa:16:3e:df:28:b4", "network": {"id": "be3f29da-2251-418c-87d0-cb3d9bc44075", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1147336006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4f89fff8f1947ba86e9ecefa284b1fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d12aff80-9d1b-4a67-a470-9c0148b443e3", "external-id": "nsx-vlan-transportzone-784", "segmentation_id": 784, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09eaa85f-9e", "ovs_interfaceid": "09eaa85f-9e94-4988-9a61-7595a0fbe90c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.732984] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595a8eac-d69f-45ad-acd4-2396a72f6fa8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.755898] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0601f4f6-0993-4c3e-8c14-98372ac231c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.903715] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Releasing lock "refresh_cache-32b17ff4-f7e1-498d-aef7-162f81cd5feb" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.904064] env[62974]: DEBUG nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Instance network_info: |[{"id": "09eaa85f-9e94-4988-9a61-7595a0fbe90c", "address": "fa:16:3e:df:28:b4", "network": {"id": "be3f29da-2251-418c-87d0-cb3d9bc44075", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1147336006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4f89fff8f1947ba86e9ecefa284b1fa", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d12aff80-9d1b-4a67-a470-9c0148b443e3", "external-id": "nsx-vlan-transportzone-784", "segmentation_id": 784, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09eaa85f-9e", "ovs_interfaceid": "09eaa85f-9e94-4988-9a61-7595a0fbe90c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 924.905403] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654699, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.905403] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:28:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd12aff80-9d1b-4a67-a470-9c0148b443e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09eaa85f-9e94-4988-9a61-7595a0fbe90c', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.912486] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Creating folder: Project (f4f89fff8f1947ba86e9ecefa284b1fa). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 924.915636] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a88872a7-9d0c-44dd-95d9-992fba13459a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.926529] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Created folder: Project (f4f89fff8f1947ba86e9ecefa284b1fa) in parent group-v535199. [ 924.926769] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Creating folder: Instances. Parent ref: group-v535435. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 924.927068] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb4e9ed4-46f9-4e67-96a9-e1c058d3b2d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.939353] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Created folder: Instances in parent group-v535435. 
[ 924.939353] env[62974]: DEBUG oslo.service.loopingcall [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.939353] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.939353] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e18f1da6-45aa-4a30-8814-3d480dc508be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.953953] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099ea821-c5bb-44aa-86e0-81d16f5e2d8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.962923] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e11fc5f-de52-4cce-9161-26ded977cbe5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.966905] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.966905] env[62974]: value = "task-2654702" [ 924.966905] env[62974]: _type = "Task" [ 924.966905] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.997121] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b372491-6f4e-4b4a-9ab7-46eeaabc2778 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.003807] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654702, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.009167] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b7b1a8-eb95-496f-ab58-d13d61bb255b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.028249] env[62974]: DEBUG nova.compute.provider_tree [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.052809] env[62974]: DEBUG nova.compute.manager [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Received event network-vif-plugged-09eaa85f-9e94-4988-9a61-7595a0fbe90c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 925.053666] env[62974]: DEBUG oslo_concurrency.lockutils [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] Acquiring lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.055530] env[62974]: DEBUG oslo_concurrency.lockutils [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.055530] env[62974]: DEBUG oslo_concurrency.lockutils [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.055530] env[62974]: DEBUG nova.compute.manager [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] No waiting events found dispatching network-vif-plugged-09eaa85f-9e94-4988-9a61-7595a0fbe90c {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 925.055530] env[62974]: WARNING nova.compute.manager [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Received unexpected event network-vif-plugged-09eaa85f-9e94-4988-9a61-7595a0fbe90c for instance with vm_state building and task_state spawning. 
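The recurring "Acquiring lock ...", "acquired ... :: waited 0.000s" and "released ... :: held 0.000s" triplets in this trace (the "compute_resources" lock and the per-instance "<uuid>-events" lock just above) are emitted by oslo.concurrency's lockutils helpers, which log on entry to and exit from the guarded section. A minimal sketch of that usage follows; the lock names and the guarded callables are hypothetical examples, not Nova's actual code.

# Minimal sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired ... / released ..." lines in this log.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage_example(tracker, instance):
    # Runs while holding the in-process lock; lockutils logs how long the
    # caller waited for the lock and how long it was held.
    tracker.update_usage(instance)


def pop_event_example(pending_events, instance_uuid):
    # The same helper is also available as a context manager, which is the
    # shape of the per-instance "<uuid>-events" lock seen above.
    with lockutils.lock(f'{instance_uuid}-events'):
        return pending_events.pop(instance_uuid, None)

Both forms default to an in-process semaphore; passing external=True switches to a file-based lock that is shared across worker processes.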
[ 925.055530] env[62974]: DEBUG nova.compute.manager [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Received event network-changed-09eaa85f-9e94-4988-9a61-7595a0fbe90c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 925.055738] env[62974]: DEBUG nova.compute.manager [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Refreshing instance network info cache due to event network-changed-09eaa85f-9e94-4988-9a61-7595a0fbe90c. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 925.055738] env[62974]: DEBUG oslo_concurrency.lockutils [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] Acquiring lock "refresh_cache-32b17ff4-f7e1-498d-aef7-162f81cd5feb" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.055738] env[62974]: DEBUG oslo_concurrency.lockutils [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] Acquired lock "refresh_cache-32b17ff4-f7e1-498d-aef7-162f81cd5feb" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.055738] env[62974]: DEBUG nova.network.neutron [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Refreshing network info cache for port 09eaa85f-9e94-4988-9a61-7595a0fbe90c {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 925.272475] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 925.272475] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7824b5c3-4db4-40e2-a432-383abd4cb0a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.277370] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 925.277370] env[62974]: value = "task-2654703" [ 925.277370] env[62974]: _type = "Task" [ 925.277370] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.289528] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654703, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.402122] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654699, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.478764] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654702, 'name': CreateVM_Task, 'duration_secs': 0.332485} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.478764] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.478764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.478764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.479226] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 925.479982] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e502e1ba-dfac-409a-b447-9b584e234e4c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.484292] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 925.484292] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a8f400-e7e3-0254-dc11-4d27d7584075" [ 925.484292] env[62974]: _type = "Task" [ 925.484292] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.492309] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a8f400-e7e3-0254-dc11-4d27d7584075, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.533864] env[62974]: DEBUG nova.scheduler.client.report [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 925.787298] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654703, 'name': CreateSnapshot_Task, 'duration_secs': 0.46642} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.787612] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 925.788456] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f67705d-414f-48b9-bc6a-9d316b1d131e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.902748] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654699, 'name': ReconfigVM_Task, 'duration_secs': 1.529898} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.903269] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Reconfigured VM instance instance-0000004f to attach disk [datastore1] e11408df-466c-4101-b0cc-3621cda78a45/e11408df-466c-4101-b0cc-3621cda78a45.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.903523] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance 'e11408df-466c-4101-b0cc-3621cda78a45' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 925.995416] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a8f400-e7e3-0254-dc11-4d27d7584075, 'name': SearchDatastore_Task, 'duration_secs': 0.008976} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.995950] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.995950] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.996218] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.996360] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.996536] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.996812] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bc875af-273b-4985-93c4-ed2a755661d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.006423] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.006739] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 926.007726] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e85ea6be-8404-4cbf-961b-afa213238a83 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.014476] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 926.014476] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219d481-7056-b86c-b863-cceb56a3c6f9" [ 926.014476] env[62974]: _type = "Task" [ 926.014476] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.022912] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219d481-7056-b86c-b863-cceb56a3c6f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.036763] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.037237] env[62974]: DEBUG nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 926.039876] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.418s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.040125] env[62974]: DEBUG nova.objects.instance [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lazy-loading 'resources' on Instance uuid eb8647c7-f5e1-4de5-8321-9a9ecff5961c {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.090837] env[62974]: DEBUG nova.network.neutron [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Updated VIF entry in instance network info cache for port 09eaa85f-9e94-4988-9a61-7595a0fbe90c. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 926.090837] env[62974]: DEBUG nova.network.neutron [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Updating instance_info_cache with network_info: [{"id": "09eaa85f-9e94-4988-9a61-7595a0fbe90c", "address": "fa:16:3e:df:28:b4", "network": {"id": "be3f29da-2251-418c-87d0-cb3d9bc44075", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1147336006-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4f89fff8f1947ba86e9ecefa284b1fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d12aff80-9d1b-4a67-a470-9c0148b443e3", "external-id": "nsx-vlan-transportzone-784", "segmentation_id": 784, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09eaa85f-9e", "ovs_interfaceid": "09eaa85f-9e94-4988-9a61-7595a0fbe90c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.309657] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 926.310547] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c3e23c81-3470-440d-b65a-2553736e17a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.320259] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 
tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 926.320259] env[62974]: value = "task-2654704" [ 926.320259] env[62974]: _type = "Task" [ 926.320259] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.329159] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654704, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.413620] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc641a8-3e14-478c-92d6-cb1bbc985f65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.435957] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a013a8-77a9-49f0-a753-2d6652df2b10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.454421] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance 'e11408df-466c-4101-b0cc-3621cda78a45' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.527627] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5219d481-7056-b86c-b863-cceb56a3c6f9, 'name': SearchDatastore_Task, 'duration_secs': 0.011191} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.528650] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1f910bc-42d6-4943-b731-2730e65c5499 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.538317] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 926.538317] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5253eb49-0d38-5a6b-ac02-77c9773060ae" [ 926.538317] env[62974]: _type = "Task" [ 926.538317] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.546234] env[62974]: DEBUG nova.compute.utils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.555277] env[62974]: DEBUG nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 926.555623] env[62974]: DEBUG nova.network.neutron [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.573514] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5253eb49-0d38-5a6b-ac02-77c9773060ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.594067] env[62974]: DEBUG oslo_concurrency.lockutils [req-31ce36a9-95a6-4900-a595-8b7ae80fd689 req-f1cf25c4-a1c0-4404-8180-61e067e23187 service nova] Releasing lock "refresh_cache-32b17ff4-f7e1-498d-aef7-162f81cd5feb" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.637438] env[62974]: DEBUG nova.policy [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a86bbc98ec50467792b3c6a6cedc790b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14dd4a9a77ad40458d40bb82ac4b90a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 926.834906] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654704, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.977416] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c781c05e-2013-4452-b8c6-46bf7576ff82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.988305] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a27b952-f6c0-4444-8351-77f69491fd86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.029362] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96531c58-1bc3-4830-8751-110391621cb1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.037293] env[62974]: DEBUG nova.network.neutron [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Port 39690695-af5c-4491-9d0f-b5ea691ce54f binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 927.039963] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86045322-adb0-4967-bd25-b7e723d24c47 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.051093] env[62974]: DEBUG nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 927.071143] env[62974]: DEBUG nova.compute.provider_tree [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.073630] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5253eb49-0d38-5a6b-ac02-77c9773060ae, 'name': SearchDatastore_Task, 'duration_secs': 0.030812} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.074793] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.075373] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 32b17ff4-f7e1-498d-aef7-162f81cd5feb/32b17ff4-f7e1-498d-aef7-162f81cd5feb.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 927.078685] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95e91e4e-05fc-447c-bd3f-b483370650fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.091080] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 927.091080] env[62974]: value = "task-2654705" [ 927.091080] env[62974]: _type = "Task" [ 927.091080] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.101040] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654705, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.144706] env[62974]: DEBUG nova.network.neutron [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Successfully created port: c027407f-1e07-4406-a05e-d7820d25a2bf {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.332737] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654704, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.580488] env[62974]: DEBUG nova.scheduler.client.report [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.584181] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.584181] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.587060] env[62974]: INFO nova.compute.manager [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Rebooting instance [ 927.604751] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654705, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.837140] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654704, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.072584] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "e11408df-466c-4101-b0cc-3621cda78a45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.076023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.076023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.085780] env[62974]: DEBUG nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 928.091224] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.100338] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.880s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.102051] env[62974]: INFO nova.compute.claims [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.121878] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.759373} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.121878] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 32b17ff4-f7e1-498d-aef7-162f81cd5feb/32b17ff4-f7e1-498d-aef7-162f81cd5feb.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.121878] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 928.121878] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c94b62b-a9c9-4f64-a661-b8f262ab0063 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.123280] env[62974]: INFO nova.scheduler.client.report [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Deleted allocations for instance eb8647c7-f5e1-4de5-8321-9a9ecff5961c [ 928.134153] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 928.134153] env[62974]: value = "task-2654706" [ 928.134153] env[62974]: _type = "Task" [ 928.134153] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.143548] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 928.143811] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.143963] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 928.144159] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.144304] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 928.144610] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 928.144980] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 928.144980] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 928.145215] env[62974]: DEBUG nova.virt.hardware [None 
req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 928.145270] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 928.145493] env[62974]: DEBUG nova.virt.hardware [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 928.147127] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.147330] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.147527] env[62974]: DEBUG nova.network.neutron [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.149545] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1476c0ff-5c22-4b67-9322-eedeadde942c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.158395] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654706, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.162355] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e44f21a-1982-4cc1-bcd1-b52b87fac6f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.332493] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654704, 'name': CloneVM_Task, 'duration_secs': 1.878712} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.332921] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Created linked-clone VM from snapshot [ 928.333553] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d8a98e-597e-4e40-a1f8-f994af98136b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.342807] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Uploading image c8e3e92f-78f7-4298-9214-5d340b78d888 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 928.373427] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 928.373427] env[62974]: value = "vm-535439" [ 928.373427] env[62974]: _type = "VirtualMachine" [ 928.373427] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 928.373714] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bc3b53ad-763c-4a7d-9b6f-a3dc00c52a92 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.382416] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease: (returnval){ [ 928.382416] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe6140-6dbc-2f9b-7ae6-7968b71e5746" [ 928.382416] env[62974]: _type = "HttpNfcLease" [ 928.382416] env[62974]: } obtained for exporting VM: (result){ [ 928.382416] env[62974]: value = "vm-535439" [ 928.382416] env[62974]: _type = "VirtualMachine" [ 928.382416] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 928.382729] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the lease: (returnval){ [ 928.382729] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe6140-6dbc-2f9b-7ae6-7968b71e5746" [ 928.382729] env[62974]: _type = "HttpNfcLease" [ 928.382729] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 928.389790] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 928.389790] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe6140-6dbc-2f9b-7ae6-7968b71e5746" [ 928.389790] env[62974]: _type = "HttpNfcLease" [ 928.389790] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 928.640916] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e1133cae-a33a-4d15-8c3d-e65460b50c48 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "eb8647c7-f5e1-4de5-8321-9a9ecff5961c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.825s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.649371] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654706, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.227874} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.649717] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.650619] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201250f4-e5e0-4d83-ba3e-e651e32f5c62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.680036] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 32b17ff4-f7e1-498d-aef7-162f81cd5feb/32b17ff4-f7e1-498d-aef7-162f81cd5feb.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.680036] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3655722a-e292-4d15-8e11-e0221f42f6a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.699336] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 928.699336] env[62974]: value = "task-2654709" [ 928.699336] env[62974]: _type = "Task" [ 928.699336] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.707953] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654709, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.786167] env[62974]: DEBUG nova.compute.manager [req-92536449-9ee1-4426-8652-ee73bc5a1e51 req-5b06cd38-007a-4093-985d-c59cc6f5f3c9 service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Received event network-vif-plugged-c027407f-1e07-4406-a05e-d7820d25a2bf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 928.786167] env[62974]: DEBUG oslo_concurrency.lockutils [req-92536449-9ee1-4426-8652-ee73bc5a1e51 req-5b06cd38-007a-4093-985d-c59cc6f5f3c9 service nova] Acquiring lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.786167] env[62974]: DEBUG oslo_concurrency.lockutils [req-92536449-9ee1-4426-8652-ee73bc5a1e51 req-5b06cd38-007a-4093-985d-c59cc6f5f3c9 service nova] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.786301] env[62974]: DEBUG oslo_concurrency.lockutils [req-92536449-9ee1-4426-8652-ee73bc5a1e51 req-5b06cd38-007a-4093-985d-c59cc6f5f3c9 service nova] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.786939] env[62974]: DEBUG nova.compute.manager [req-92536449-9ee1-4426-8652-ee73bc5a1e51 req-5b06cd38-007a-4093-985d-c59cc6f5f3c9 service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] No waiting events found dispatching network-vif-plugged-c027407f-1e07-4406-a05e-d7820d25a2bf {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 928.786939] env[62974]: WARNING nova.compute.manager [req-92536449-9ee1-4426-8652-ee73bc5a1e51 req-5b06cd38-007a-4093-985d-c59cc6f5f3c9 service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Received unexpected event network-vif-plugged-c027407f-1e07-4406-a05e-d7820d25a2bf for instance with vm_state building and task_state spawning. [ 928.892975] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 928.892975] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe6140-6dbc-2f9b-7ae6-7968b71e5746" [ 928.892975] env[62974]: _type = "HttpNfcLease" [ 928.892975] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 928.893548] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 928.893548] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe6140-6dbc-2f9b-7ae6-7968b71e5746" [ 928.893548] env[62974]: _type = "HttpNfcLease" [ 928.893548] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 928.895147] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce8e504-ee5c-4962-a1ab-5d62908372d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.902151] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9ecd-cc61-b557-5307-ddd3a35d5d0a/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 928.902627] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9ecd-cc61-b557-5307-ddd3a35d5d0a/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 928.976166] env[62974]: DEBUG nova.network.neutron [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Successfully updated port: c027407f-1e07-4406-a05e-d7820d25a2bf {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 929.019725] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c4474e45-a3dc-42cc-b63a-3b217b751cb3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.024964] env[62974]: DEBUG nova.network.neutron [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 929.125703] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.125703] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.125703] env[62974]: DEBUG nova.network.neutron [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.213564] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654709, 'name': ReconfigVM_Task, 'duration_secs': 0.322562} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.213889] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 32b17ff4-f7e1-498d-aef7-162f81cd5feb/32b17ff4-f7e1-498d-aef7-162f81cd5feb.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.214644] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd8096a8-f060-4267-b898-c6cd8fcc60ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.223139] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 929.223139] env[62974]: value = "task-2654710" [ 929.223139] env[62974]: _type = "Task" [ 929.223139] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.232686] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654710, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.483032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-cc7c25b5-1463-4eab-8d8f-f812d4f16c34" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.483032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-cc7c25b5-1463-4eab-8d8f-f812d4f16c34" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.483032] env[62974]: DEBUG nova.network.neutron [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.485612] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65cea27-8c4c-474d-8bf7-53169326c5ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.501585] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86f4ed2-c6d8-46c2-846e-82e91359fdc8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.537981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.540019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ce4d66-f3bc-4873-a5c4-adc1300c373e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.550830] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3726cf-cad5-45d3-a9de-9f730982c1ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.568036] env[62974]: DEBUG nova.compute.provider_tree [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.734884] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654710, 'name': Rename_Task, 'duration_secs': 0.179564} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.735469] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.735884] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76f322ca-954b-4e89-848f-0b852ff30a40 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.742798] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 929.742798] env[62974]: value = "task-2654711" [ 929.742798] env[62974]: _type = "Task" [ 929.742798] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.757285] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654711, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.856853] env[62974]: DEBUG nova.network.neutron [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance_info_cache with network_info: [{"id": "39690695-af5c-4491-9d0f-b5ea691ce54f", "address": "fa:16:3e:1c:c5:83", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39690695-af", "ovs_interfaceid": "39690695-af5c-4491-9d0f-b5ea691ce54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.950430] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.950670] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.029348] env[62974]: DEBUG nova.network.neutron [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.042305] env[62974]: DEBUG nova.compute.manager [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.043218] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbba452-a909-4dff-b2ef-9826094828b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.071889] env[62974]: DEBUG nova.scheduler.client.report [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.198075] env[62974]: DEBUG nova.network.neutron [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Updating instance_info_cache with network_info: [{"id": "c027407f-1e07-4406-a05e-d7820d25a2bf", "address": "fa:16:3e:95:3d:b5", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc027407f-1e", 
"ovs_interfaceid": "c027407f-1e07-4406-a05e-d7820d25a2bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.254566] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654711, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.359709] env[62974]: DEBUG oslo_concurrency.lockutils [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.456942] env[62974]: DEBUG nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 930.577472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.578070] env[62974]: DEBUG nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 930.582336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.881s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.582503] env[62974]: DEBUG nova.objects.instance [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lazy-loading 'resources' on Instance uuid 14523914-68ab-4d39-8eb8-6a786ddcb4dc {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.701752] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-cc7c25b5-1463-4eab-8d8f-f812d4f16c34" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.702320] env[62974]: DEBUG nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Instance network_info: |[{"id": "c027407f-1e07-4406-a05e-d7820d25a2bf", "address": "fa:16:3e:95:3d:b5", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc027407f-1e", "ovs_interfaceid": "c027407f-1e07-4406-a05e-d7820d25a2bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 930.702949] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:3d:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c027407f-1e07-4406-a05e-d7820d25a2bf', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.718597] env[62974]: DEBUG oslo.service.loopingcall [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 
tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.718597] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.718754] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d169668-f774-4c3e-8b1c-59ebe6e4e43a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.740186] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.740186] env[62974]: value = "task-2654712" [ 930.740186] env[62974]: _type = "Task" [ 930.740186] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.753216] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654712, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.757789] env[62974]: DEBUG oslo_vmware.api [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654711, 'name': PowerOnVM_Task, 'duration_secs': 0.61806} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.757789] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.757789] env[62974]: INFO nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Took 9.12 seconds to spawn the instance on the hypervisor. 
[ 930.757960] env[62974]: DEBUG nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.758653] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e660bc-e490-4005-9fd8-f5e67ce5cc7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.830767] env[62974]: DEBUG nova.compute.manager [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Received event network-changed-c027407f-1e07-4406-a05e-d7820d25a2bf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 930.831051] env[62974]: DEBUG nova.compute.manager [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Refreshing instance network info cache due to event network-changed-c027407f-1e07-4406-a05e-d7820d25a2bf. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 930.832452] env[62974]: DEBUG oslo_concurrency.lockutils [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] Acquiring lock "refresh_cache-cc7c25b5-1463-4eab-8d8f-f812d4f16c34" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.832452] env[62974]: DEBUG oslo_concurrency.lockutils [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] Acquired lock "refresh_cache-cc7c25b5-1463-4eab-8d8f-f812d4f16c34" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.832452] env[62974]: DEBUG nova.network.neutron [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Refreshing network info cache for port c027407f-1e07-4406-a05e-d7820d25a2bf {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.889989] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f55b14b-09fa-4bd2-91e5-d2badfee643c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.909617] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087f55ac-17ad-48d1-8c66-ea4f34dc0f52 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.916589] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance 'e11408df-466c-4101-b0cc-3621cda78a45' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 930.979139] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 
tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.063437] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743cdfe2-49aa-4420-ae74-df1f698fad5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.071845] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Doing hard reboot of VM {{(pid=62974) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 931.072056] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-ec1c17d6-4536-4f7d-b796-589a8c1abc29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.078391] env[62974]: DEBUG oslo_vmware.api [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 931.078391] env[62974]: value = "task-2654713" [ 931.078391] env[62974]: _type = "Task" [ 931.078391] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.087911] env[62974]: DEBUG nova.compute.utils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 931.092065] env[62974]: DEBUG oslo_vmware.api [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654713, 'name': ResetVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.092849] env[62974]: DEBUG nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 931.093104] env[62974]: DEBUG nova.network.neutron [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.220109] env[62974]: DEBUG nova.policy [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e07ae60010640d88de0d3b716914186', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd914830aaf454e26b77cbb46722764ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 931.250514] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654712, 'name': CreateVM_Task, 'duration_secs': 0.432309} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.253059] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.254011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.254105] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.254431] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 931.254699] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4e67b40-885b-40c9-a0d7-0436ad94352a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.259517] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 931.259517] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f62d10-925e-28a7-070f-42e2aeed6430" [ 931.259517] env[62974]: _type = "Task" [ 931.259517] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.275247] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f62d10-925e-28a7-070f-42e2aeed6430, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.281106] env[62974]: INFO nova.compute.manager [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Took 26.83 seconds to build instance. [ 931.422664] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 931.425786] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-502b9252-f0f1-4649-acf9-e65285954463 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.432088] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 931.432088] env[62974]: value = "task-2654714" [ 931.432088] env[62974]: _type = "Task" [ 931.432088] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.441167] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654714, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.442780] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397cb5d3-a185-4383-a486-8c0e33abcb1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.450305] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c077b113-75db-4043-8b44-a18e4848507b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.492708] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7004a72-0af6-4ca5-9a10-9583f8da6576 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.499400] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f257d7-2f25-493d-a72d-bd2532225afe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.515508] env[62974]: DEBUG nova.compute.provider_tree [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.589447] env[62974]: DEBUG oslo_vmware.api [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654713, 'name': ResetVM_Task, 'duration_secs': 0.111716} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.589760] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Did hard reboot of VM {{(pid=62974) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 931.589956] env[62974]: DEBUG nova.compute.manager [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 931.590840] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abafa571-043a-448d-b1c3-bdc88f6f9e26 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.593703] env[62974]: DEBUG nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 931.646891] env[62974]: DEBUG nova.network.neutron [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Successfully created port: fd4e4478-3958-46b8-a54d-e6619377d377 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.722208] env[62974]: DEBUG nova.network.neutron [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Updated VIF entry in instance network info cache for port c027407f-1e07-4406-a05e-d7820d25a2bf. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.722649] env[62974]: DEBUG nova.network.neutron [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Updating instance_info_cache with network_info: [{"id": "c027407f-1e07-4406-a05e-d7820d25a2bf", "address": "fa:16:3e:95:3d:b5", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc027407f-1e", "ovs_interfaceid": "c027407f-1e07-4406-a05e-d7820d25a2bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.770938] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f62d10-925e-28a7-070f-42e2aeed6430, 'name': SearchDatastore_Task, 'duration_secs': 0.01552} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.771325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.771565] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.771799] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.772011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.772210] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.772733] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0f05971-f791-49cf-bf65-b8af0ae3ba5a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.782495] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.782719] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.784253] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e88bf5ad-31de-4d33-aff2-369ea2f12d9b tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.339s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.784253] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b76d9987-4031-4eb8-bd5c-f78c0eaccd41 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.789599] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 931.789599] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522d1b0c-1534-a0bf-e64a-5305641682c8" [ 931.789599] env[62974]: _type = "Task" [ 931.789599] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.798458] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522d1b0c-1534-a0bf-e64a-5305641682c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.942759] env[62974]: DEBUG oslo_vmware.api [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654714, 'name': PowerOnVM_Task, 'duration_secs': 0.506619} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.943126] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 931.943345] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-30f6aa81-bc0a-4356-a399-d50644efa4dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance 'e11408df-466c-4101-b0cc-3621cda78a45' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 932.021194] env[62974]: DEBUG nova.scheduler.client.report [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.037727] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.037986] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.038214] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.038403] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.038573] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.042035] env[62974]: INFO nova.compute.manager [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Terminating instance [ 932.104760] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a9693945-0d16-4f3e-91b7-7a4e533b105c tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.520s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.228257] env[62974]: DEBUG oslo_concurrency.lockutils [req-6ec8af6c-1f15-4311-9891-580832c2dc19 req-a9d910bc-f44d-4842-9962-327ece46b01a service nova] Releasing lock "refresh_cache-cc7c25b5-1463-4eab-8d8f-f812d4f16c34" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.241082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "59ece0e8-85c2-499d-aba2-fd45fc116013" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.241566] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.241859] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "59ece0e8-85c2-499d-aba2-fd45fc116013-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.242076] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.242254] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock 
"59ece0e8-85c2-499d-aba2-fd45fc116013-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.244419] env[62974]: INFO nova.compute.manager [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Terminating instance [ 932.302099] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522d1b0c-1534-a0bf-e64a-5305641682c8, 'name': SearchDatastore_Task, 'duration_secs': 0.014249} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.303185] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9020cb9f-0fec-478a-9135-ea2202a093da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.310092] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 932.310092] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521816ec-e9a0-0854-0b1c-2e2b522d8b14" [ 932.310092] env[62974]: _type = "Task" [ 932.310092] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.320705] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521816ec-e9a0-0854-0b1c-2e2b522d8b14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.527270] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.945s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.530460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.372s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.547109] env[62974]: DEBUG nova.compute.manager [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.547109] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.547935] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e9271c-ee36-4130-918d-e768093d2dbe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.552224] env[62974]: INFO nova.scheduler.client.report [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted allocations for instance 14523914-68ab-4d39-8eb8-6a786ddcb4dc [ 932.558208] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.558712] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee8d3664-397b-47dd-9029-539b9b1c46dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.566859] env[62974]: DEBUG oslo_vmware.api [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 932.566859] env[62974]: value = "task-2654715" [ 932.566859] env[62974]: _type = "Task" [ 932.566859] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.578094] env[62974]: DEBUG oslo_vmware.api [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654715, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.606064] env[62974]: DEBUG nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 932.643469] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 932.643469] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.643469] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.643469] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.643740] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 932.643740] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 932.643813] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 932.643964] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 932.645028] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Got 1 possible 
topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 932.645028] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 932.645028] env[62974]: DEBUG nova.virt.hardware [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 932.645797] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1809418-779e-4a30-b637-d146ec10b1f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.656196] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7923ac-4cd1-47c0-a11e-a45594d57805 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.749035] env[62974]: DEBUG nova.compute.manager [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.749035] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.749845] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d287b8b-01ca-467c-a6d3-094cd4b04ea7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.757733] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.758016] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b62b30cd-7522-498f-9850-3260280cefa9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.764907] env[62974]: DEBUG oslo_vmware.api [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 932.764907] env[62974]: value = "task-2654716" [ 932.764907] env[62974]: _type = "Task" [ 932.764907] env[62974]: } to complete. 
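The nova.virt.hardware lines above walk from "Build topologies for 1 vcpu(s) 1:1:1" to a single VirtCPUTopology(cores=1,sockets=1,threads=1). A toy re-derivation of that enumeration, assuming only the constraint that sockets*cores*threads must equal the vCPU count while staying under the 65536 per-dimension limits reported in the log; this is a simplification, not the nova.virt.hardware implementation:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is exactly vcpus
        # and which respect the per-dimension maxima from the "limits" lines.
        found = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology the log reports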
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.775541] env[62974]: DEBUG oslo_vmware.api [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654716, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.821510] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521816ec-e9a0-0854-0b1c-2e2b522d8b14, 'name': SearchDatastore_Task, 'duration_secs': 0.023076} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.821857] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.822042] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] cc7c25b5-1463-4eab-8d8f-f812d4f16c34/cc7c25b5-1463-4eab-8d8f-f812d4f16c34.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.822316] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51644ff0-b87c-4a6d-b04a-5aba6fba27e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.829605] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 932.829605] env[62974]: value = "task-2654717" [ 932.829605] env[62974]: _type = "Task" [ 932.829605] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.838584] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654717, 'name': CopyVirtualDisk_Task} progress is 0%. 
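The recurring "Waiting for the task: (returnval){ value = task-... }" blocks, followed by "progress is N%" and finally "completed successfully ... duration_secs", trace a poll-until-done loop. A generic stand-in for that pattern rather than the oslo.vmware implementation (read_task_info below is a hypothetical callable, and the real wait_for_task is driven by a looping call instead of a bare sleep):

    import time

    def wait_for_task(read_task_info, poll_interval=0.5):
        """Poll a task until it reaches a terminal state.

        read_task_info is a hypothetical callable returning a dict such as
        {'state': 'running'|'success'|'error', 'progress': 0..100, 'result': ...}.
        """
        while True:
            info = read_task_info()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError("task failed: %r" % info)
            # Corresponds to the periodic "Task: {...} progress is N%" DEBUG lines.
            print("progress is %d%%" % info.get("progress", 0))
            time.sleep(poll_interval)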
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.063523] env[62974]: DEBUG oslo_concurrency.lockutils [None req-89f1a469-3570-4e6b-af28-67ec70b3b395 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "14523914-68ab-4d39-8eb8-6a786ddcb4dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.070s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.077255] env[62974]: DEBUG oslo_vmware.api [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654715, 'name': PowerOffVM_Task, 'duration_secs': 0.345207} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.077529] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.077701] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.077963] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e72dc4db-0447-4eeb-90fd-2f9c6026e424 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.125559] env[62974]: DEBUG nova.compute.manager [req-032de840-8edd-468b-b3b8-e31cb771cbab req-482f2c88-0c8c-405a-8fc5-df0cd1faebd3 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Received event network-vif-plugged-fd4e4478-3958-46b8-a54d-e6619377d377 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 933.125967] env[62974]: DEBUG oslo_concurrency.lockutils [req-032de840-8edd-468b-b3b8-e31cb771cbab req-482f2c88-0c8c-405a-8fc5-df0cd1faebd3 service nova] Acquiring lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.126510] env[62974]: DEBUG oslo_concurrency.lockutils [req-032de840-8edd-468b-b3b8-e31cb771cbab req-482f2c88-0c8c-405a-8fc5-df0cd1faebd3 service nova] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.126775] env[62974]: DEBUG oslo_concurrency.lockutils [req-032de840-8edd-468b-b3b8-e31cb771cbab req-482f2c88-0c8c-405a-8fc5-df0cd1faebd3 service nova] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.127577] env[62974]: DEBUG nova.compute.manager 
[req-032de840-8edd-468b-b3b8-e31cb771cbab req-482f2c88-0c8c-405a-8fc5-df0cd1faebd3 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] No waiting events found dispatching network-vif-plugged-fd4e4478-3958-46b8-a54d-e6619377d377 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 933.127577] env[62974]: WARNING nova.compute.manager [req-032de840-8edd-468b-b3b8-e31cb771cbab req-482f2c88-0c8c-405a-8fc5-df0cd1faebd3 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Received unexpected event network-vif-plugged-fd4e4478-3958-46b8-a54d-e6619377d377 for instance with vm_state building and task_state spawning. [ 933.207411] env[62974]: DEBUG nova.network.neutron [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Successfully updated port: fd4e4478-3958-46b8-a54d-e6619377d377 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.223881] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.225314] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.225314] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Deleting the datastore file [datastore1] 32b17ff4-f7e1-498d-aef7-162f81cd5feb {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.225314] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6762806-7e06-4467-9103-8bdf55779ceb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.232747] env[62974]: DEBUG oslo_vmware.api [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for the task: (returnval){ [ 933.232747] env[62974]: value = "task-2654719" [ 933.232747] env[62974]: _type = "Task" [ 933.232747] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.242243] env[62974]: DEBUG oslo_vmware.api [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654719, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.276745] env[62974]: DEBUG oslo_vmware.api [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654716, 'name': PowerOffVM_Task, 'duration_secs': 0.23007} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.276745] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.276745] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.276745] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67e2bcc3-97a7-455a-9b1b-d60dc755b6c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.341507] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654717, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.345085] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.345415] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.345741] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Deleting the datastore file [datastore1] 59ece0e8-85c2-499d-aba2-fd45fc116013 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.346480] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85479eaa-a6c2-4035-a129-6d3fc5fc16fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.353670] env[62974]: DEBUG oslo_vmware.api [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for the task: (returnval){ [ 933.353670] env[62974]: value = "task-2654721" [ 933.353670] env[62974]: _type = "Task" [ 933.353670] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.363639] env[62974]: DEBUG oslo_vmware.api [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654721, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.545109] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Applying migration context for instance e11408df-466c-4101-b0cc-3621cda78a45 as it has an incoming, in-progress migration 72f105a4-6bb5-4b6d-9659-0904cb1114d8. Migration status is finished {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 933.547027] env[62974]: INFO nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating resource usage from migration 72f105a4-6bb5-4b6d-9659-0904cb1114d8 [ 933.569608] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cf73422d-7f4b-4bae-9d69-de74d7211243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.569785] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 59ece0e8-85c2-499d-aba2-fd45fc116013 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.569842] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 3426d512-d54e-4852-8eca-8ba9f5fef418 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.569987] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c002aec9-4fdf-45c9-9ef6-d196c4891e19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.570122] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 69fb00b3-6a41-4ef5-8876-6548cae31c07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.570274] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 55229db9-9442-4973-a1f2-7762227167a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.570419] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance e23dbff7-d23e-4909-9b33-67ed15c325e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.570538] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c1d0b90c-aa1c-485d-850d-a1495feac7c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.570660] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 514e0f15-f27d-4fab-9107-b92884075420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.570774] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 3df97cea-5a6e-4d7a-b2f3-e02213816e24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.570927] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 366b5816-a847-48d1-ad03-5758e473a9d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 933.571081] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 226f3328-e3b1-4ae1-8b7c-349b552cf5a2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 933.571206] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Migration 72f105a4-6bb5-4b6d-9659-0904cb1114d8 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 933.571321] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance e11408df-466c-4101-b0cc-3621cda78a45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.571461] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 32b17ff4-f7e1-498d-aef7-162f81cd5feb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.571592] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cc7c25b5-1463-4eab-8d8f-f812d4f16c34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.571706] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 933.709751] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "refresh_cache-b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.710086] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "refresh_cache-b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.710226] env[62974]: DEBUG nova.network.neutron [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.742328] env[62974]: DEBUG oslo_vmware.api [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Task: {'id': task-2654719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288757} completed successfully. 
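The resource-tracker block above sorts placement allocations into a few buckets: instances actively managed here (allocation kept), an in-progress migration (allocation kept), instances scheduled but not yet started (heal skipped), and unknown instances with allocations against this host (WARNING, heal skipped). A compressed paraphrase of that branching, with made-up function and argument names purely to make the outcomes explicit; the real ResourceTracker works on allocation dicts, not booleans:

    def allocation_heal_decision(known_here, has_started, migration_in_progress=False):
        if migration_in_progress:
            return "migration active on this host; keep its allocation"
        if known_here and has_started:
            return "actively managed; keep allocation"
        if known_here and not has_started:
            return "scheduled but not yet started; skip heal"
        return "unknown instance with allocations here; warn and skip"

    print(allocation_heal_decision(known_here=True, has_started=True))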
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.742613] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.742806] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.742984] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.743183] env[62974]: INFO nova.compute.manager [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Took 1.20 seconds to destroy the instance on the hypervisor. [ 933.743464] env[62974]: DEBUG oslo.service.loopingcall [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.743664] env[62974]: DEBUG nova.compute.manager [-] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.743973] env[62974]: DEBUG nova.network.neutron [-] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.841443] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654717, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617528} completed successfully. 
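Read end to end, the teardown of instance 32b17ff4-f7e1-498d-aef7-162f81cd5feb above follows a fixed order: power off the VM, unregister it, delete its datastore directory, then deallocate its network. An outline of that order as a plain function; the vm, datastore_dir, and network objects are stand-ins, not Nova or oslo.vmware types:

    def destroy_instance_outline(vm, datastore_dir, network):
        vm.power_off()          # "Powering off the VM"            -> PowerOffVM_Task
        vm.unregister()         # "Unregistering the VM"           -> UnregisterVM
        datastore_dir.delete()  # "Deleting the datastore file"    -> DeleteDatastoreFile_Task
        network.deallocate()    # "Deallocating network for instance" -> deallocate_for_instance()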
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.841739] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] cc7c25b5-1463-4eab-8d8f-f812d4f16c34/cc7c25b5-1463-4eab-8d8f-f812d4f16c34.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.841937] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.842286] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d33e903f-dfb9-4e1a-aa06-51a3a478ec82 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.850929] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 933.850929] env[62974]: value = "task-2654722" [ 933.850929] env[62974]: _type = "Task" [ 933.850929] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.865718] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654722, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.870561] env[62974]: DEBUG oslo_vmware.api [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Task: {'id': task-2654721, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249559} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.870997] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.871235] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.871487] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.871787] env[62974]: INFO nova.compute.manager [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Took 1.12 seconds to destroy the instance on the hypervisor. [ 933.872062] env[62974]: DEBUG oslo.service.loopingcall [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.872385] env[62974]: DEBUG nova.compute.manager [-] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.872528] env[62974]: DEBUG nova.network.neutron [-] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 934.075411] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance ef54d01a-5d2c-448a-a060-37520de396ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.262404] env[62974]: DEBUG nova.network.neutron [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.362273] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654722, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120169} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.362588] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.363924] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6208450f-8205-4fca-9240-abbe921fd6a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.392452] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] cc7c25b5-1463-4eab-8d8f-f812d4f16c34/cc7c25b5-1463-4eab-8d8f-f812d4f16c34.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.396422] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-739d7954-f2bb-4a36-91ac-3a274257ba92 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.412882] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "e11408df-466c-4101-b0cc-3621cda78a45" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.413150] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.413344] env[62974]: DEBUG nova.compute.manager [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Going to confirm migration 4 {{(pid=62974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 934.421665] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 934.421665] env[62974]: value = "task-2654723" [ 934.421665] env[62974]: _type = "Task" [ 934.421665] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.430527] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654723, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.476566] env[62974]: DEBUG nova.network.neutron [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Updating instance_info_cache with network_info: [{"id": "fd4e4478-3958-46b8-a54d-e6619377d377", "address": "fa:16:3e:89:ae:f0", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd4e4478-39", "ovs_interfaceid": "fd4e4478-3958-46b8-a54d-e6619377d377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.583747] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 18489c02-5958-431f-aede-f554d0d785ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.647371] env[62974]: DEBUG nova.network.neutron [-] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.942056] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654723, 'name': ReconfigVM_Task, 'duration_secs': 0.342016} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.942756] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Reconfigured VM instance instance-00000054 to attach disk [datastore2] cc7c25b5-1463-4eab-8d8f-f812d4f16c34/cc7c25b5-1463-4eab-8d8f-f812d4f16c34.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.943882] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dfa958a-fd28-4be6-92a4-223bbc72271b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.950966] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 934.950966] env[62974]: value = "task-2654724" [ 934.950966] env[62974]: _type = "Task" [ 934.950966] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.960525] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654724, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.982946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "refresh_cache-b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.982946] env[62974]: DEBUG nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Instance network_info: |[{"id": "fd4e4478-3958-46b8-a54d-e6619377d377", "address": "fa:16:3e:89:ae:f0", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd4e4478-39", "ovs_interfaceid": "fd4e4478-3958-46b8-a54d-e6619377d377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 934.983180] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:ae:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06cc7c49-c46c-4c1e-bf51-77e9ea802c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd4e4478-3958-46b8-a54d-e6619377d377', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.996550] env[62974]: DEBUG oslo.service.loopingcall [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.996776] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 934.997081] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6dac6352-a48d-44a3-8cca-7755d85c710e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.014671] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.014927] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.015116] env[62974]: DEBUG nova.network.neutron [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.015320] env[62974]: DEBUG nova.objects.instance [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lazy-loading 'info_cache' on Instance uuid e11408df-466c-4101-b0cc-3621cda78a45 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.022952] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.022952] env[62974]: value = "task-2654725" [ 935.022952] env[62974]: _type = "Task" [ 935.022952] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.033491] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654725, 'name': CreateVM_Task} progress is 0%. 
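The "Instance VIF info" entry above flattens the Neutron port from the network_info cache into the handful of fields the VMware driver needs. A rough mapping with the same field names, assuming the port dict has the shape shown in the network_info JSON earlier; this illustrates the data flow, it is not the vmops.py code:

    def to_vif_info(port):
        # port is one element of the network_info list, e.g. the fd4e4478-... entry above.
        return {
            "network_name": port["network"]["bridge"],                 # 'br-int'
            "mac_address": port["address"],                            # 'fa:16:3e:89:ae:f0'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": port["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": port["id"],                                    # the Neutron port UUID
            "vif_model": "vmxnet3",                                    # matches the vmxnet3 image above
        }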
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.086518] env[62974]: DEBUG nova.network.neutron [-] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.089084] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 92c80524-0fb6-4f28-9a72-bc4ab5793558 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 935.151420] env[62974]: INFO nova.compute.manager [-] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Took 1.41 seconds to deallocate network for instance. [ 935.165500] env[62974]: DEBUG nova.compute.manager [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Received event network-changed-fd4e4478-3958-46b8-a54d-e6619377d377 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 935.165744] env[62974]: DEBUG nova.compute.manager [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Refreshing instance network info cache due to event network-changed-fd4e4478-3958-46b8-a54d-e6619377d377. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 935.165983] env[62974]: DEBUG oslo_concurrency.lockutils [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] Acquiring lock "refresh_cache-b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.166201] env[62974]: DEBUG oslo_concurrency.lockutils [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] Acquired lock "refresh_cache-b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.166756] env[62974]: DEBUG nova.network.neutron [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Refreshing network info cache for port fd4e4478-3958-46b8-a54d-e6619377d377 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.460969] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654724, 'name': Rename_Task, 'duration_secs': 0.192227} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.461270] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.461523] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0513678-c8dc-4750-93d6-112ce9b582c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.466936] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 935.466936] env[62974]: value = "task-2654726" [ 935.466936] env[62974]: _type = "Task" [ 935.466936] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.474949] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654726, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.534545] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654725, 'name': CreateVM_Task, 'duration_secs': 0.439782} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.534731] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.535580] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.535803] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.536234] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 935.536533] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c869b3fb-6f0c-4c03-99e6-cb0ccc1d16cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.541507] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 
tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 935.541507] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520d2d6d-611e-025a-f373-2542bdcaa9d3" [ 935.541507] env[62974]: _type = "Task" [ 935.541507] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.549695] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520d2d6d-611e-025a-f373-2542bdcaa9d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.592053] env[62974]: INFO nova.compute.manager [-] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Took 1.72 seconds to deallocate network for instance. [ 935.593264] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance aa6eb55e-79c0-4e1f-8756-05dff97b06d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 935.593407] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 935.593718] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 935.659416] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.860690] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056a9078-5957-4dfe-9470-0358bb62c0c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.869575] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c764a07-2853-457c-a742-e7d687c5fb8b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.874766] env[62974]: DEBUG nova.network.neutron [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Updated VIF entry in instance network info cache for port fd4e4478-3958-46b8-a54d-e6619377d377. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.875136] env[62974]: DEBUG nova.network.neutron [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Updating instance_info_cache with network_info: [{"id": "fd4e4478-3958-46b8-a54d-e6619377d377", "address": "fa:16:3e:89:ae:f0", "network": {"id": "6be3a6da-df9a-47f6-abb4-f5837887128d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-181968827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d914830aaf454e26b77cbb46722764ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06cc7c49-c46c-4c1e-bf51-77e9ea802c40", "external-id": "nsx-vlan-transportzone-450", "segmentation_id": 450, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd4e4478-39", "ovs_interfaceid": "fd4e4478-3958-46b8-a54d-e6619377d377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.905319] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bed2ea-5939-4cf2-bbbf-fbab583fe477 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.913475] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32377236-1d7d-46ee-ad44-de6c4a0d0bdd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.929295] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.977526] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654726, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.052205] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520d2d6d-611e-025a-f373-2542bdcaa9d3, 'name': SearchDatastore_Task, 'duration_secs': 0.014006} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.054691] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.054908] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 936.055156] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.055304] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.055479] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.055740] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12087d43-9ad7-4df6-96b9-ba80b79986e4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.064219] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.064417] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 936.065142] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2f7f225-163b-4d86-a157-9d29994e4289 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.070723] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 936.070723] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]529db39f-70c9-cd6d-2d9d-4e2eb4ad261c" [ 936.070723] env[62974]: _type = "Task" [ 936.070723] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.078671] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529db39f-70c9-cd6d-2d9d-4e2eb4ad261c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.105038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.239691] env[62974]: DEBUG nova.network.neutron [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance_info_cache with network_info: [{"id": "39690695-af5c-4491-9d0f-b5ea691ce54f", "address": "fa:16:3e:1c:c5:83", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39690695-af", "ovs_interfaceid": "39690695-af5c-4491-9d0f-b5ea691ce54f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.379069] env[62974]: DEBUG oslo_concurrency.lockutils [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] Releasing lock "refresh_cache-b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.379365] env[62974]: DEBUG nova.compute.manager [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Received event network-vif-deleted-09eaa85f-9e94-4988-9a61-7595a0fbe90c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 936.379575] env[62974]: DEBUG nova.compute.manager [req-baf36fa3-11d9-474b-a69f-a55c3b1774ab req-6f3eda1e-3f9d-4cb9-b755-3746e20ee6a8 service nova] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Received event network-vif-deleted-9ad4fa58-ef22-4d11-9cb7-041017dd38fc {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 936.432390] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.479209] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654726, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.583198] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]529db39f-70c9-cd6d-2d9d-4e2eb4ad261c, 'name': SearchDatastore_Task, 'duration_secs': 0.0123} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.584308] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7243b30-d636-48df-bfe3-15ab9cfd3590 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.591046] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 936.591046] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5283db83-63db-fb14-472f-0e598e7bc0af" [ 936.591046] env[62974]: _type = "Task" [ 936.591046] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.599456] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5283db83-63db-fb14-472f-0e598e7bc0af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.743329] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-e11408df-466c-4101-b0cc-3621cda78a45" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.743695] env[62974]: DEBUG nova.objects.instance [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lazy-loading 'migration_context' on Instance uuid e11408df-466c-4101-b0cc-3621cda78a45 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.938077] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 936.938317] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.408s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.938686] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.699s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.938913] env[62974]: DEBUG nova.objects.instance [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 936.980750] env[62974]: DEBUG oslo_vmware.api [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654726, 'name': PowerOnVM_Task, 'duration_secs': 1.119384} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.981110] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.981338] env[62974]: INFO nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Took 8.90 seconds to spawn the instance on the hypervisor. 
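The PowerOnVM_Task sequence above (invoke the vSphere call, receive a Task reference, then poll "progress is N%" until "completed successfully") is the generic oslo.vmware request/poll flow that recurs throughout this trace. Below is a minimal sketch of that flow using oslo.vmware's public session API; the vCenter host, credentials, and the 'vm-123' managed-object ID are placeholder assumptions, and Nova's vmwareapi driver actually goes through its own internal wrappers (vm_util, oslo_vmware.api._poll_task) rather than this exact code.

    # Sketch of the invoke-then-poll pattern visible in this log
    # (PowerOnVM_Task -> wait_for_task -> progress polling).
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details -- not values taken from this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # vCenter host (placeholder)
        'administrator@vsphere.local',    # username (placeholder)
        'secret',                         # password (placeholder)
        10,                               # api_retry_count
        0.5)                              # task_poll_interval, in seconds

    # Build a managed-object reference for a VM; 'vm-123' is a placeholder ID.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Invoke the asynchronous vSphere call; it returns a Task reference
    # (this is what the "Invoking VirtualMachine.PowerOnVM_Task" lines record).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task -- the "progress is N%" lines above --
    # and returns the task info on success, or raises an oslo_vmware
    # exception if the task ends in an error state.
    task_info = session.wait_for_task(task)
    print(task_info.state)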
[ 936.981600] env[62974]: DEBUG nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.982531] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b60989-fad3-4212-a9fd-38e65b02b3a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.101922] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5283db83-63db-fb14-472f-0e598e7bc0af, 'name': SearchDatastore_Task, 'duration_secs': 0.013107} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.102667] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.102787] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4/b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 937.102987] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2faa1f56-75f9-4553-9eb1-228bc6879976 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.110073] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 937.110073] env[62974]: value = "task-2654727" [ 937.110073] env[62974]: _type = "Task" [ 937.110073] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.117577] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654727, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.175507] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9ecd-cc61-b557-5307-ddd3a35d5d0a/disk-0.vmdk. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 937.176474] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b6f607-a58e-4e38-b75e-24b5c8b1f3c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.184566] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9ecd-cc61-b557-5307-ddd3a35d5d0a/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 937.184732] env[62974]: ERROR oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9ecd-cc61-b557-5307-ddd3a35d5d0a/disk-0.vmdk due to incomplete transfer. [ 937.184959] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f440a4b1-b314-4eda-b0b6-6a0629098002 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.191743] env[62974]: DEBUG oslo_vmware.rw_handles [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9ecd-cc61-b557-5307-ddd3a35d5d0a/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 937.191933] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Uploaded image c8e3e92f-78f7-4298-9214-5d340b78d888 to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 937.194432] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 937.194723] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-701c0672-ea6a-48d5-9c40-1280828d6b83 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.201092] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 937.201092] env[62974]: value = "task-2654728" [ 937.201092] env[62974]: _type = "Task" [ 937.201092] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.209255] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654728, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.246720] env[62974]: DEBUG nova.objects.base [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 937.247744] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859a7440-ffc9-4fcd-bbce-220399dc0fb5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.267533] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-281b82c1-0bca-4332-ba59-d9d1fbd67f60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.273030] env[62974]: DEBUG oslo_vmware.api [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 937.273030] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522739ca-d8c5-4f8e-ecd8-7bd0cec6611a" [ 937.273030] env[62974]: _type = "Task" [ 937.273030] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.280664] env[62974]: DEBUG oslo_vmware.api [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522739ca-d8c5-4f8e-ecd8-7bd0cec6611a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.502344] env[62974]: INFO nova.compute.manager [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Took 31.13 seconds to build instance. [ 937.623023] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506114} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.623351] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4/b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 937.623633] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 937.624122] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-872e32b4-386f-4d6b-ae09-717debf4266e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.631628] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 937.631628] env[62974]: value = "task-2654729" [ 937.631628] env[62974]: _type = "Task" [ 937.631628] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.640464] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.711988] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654728, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.783974] env[62974]: DEBUG oslo_vmware.api [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522739ca-d8c5-4f8e-ecd8-7bd0cec6611a, 'name': SearchDatastore_Task, 'duration_secs': 0.007306} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.784305] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.951345] env[62974]: DEBUG oslo_concurrency.lockutils [None req-334b6f99-5e33-4551-9cba-a0c47fced3c8 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.952472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.588s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.952670] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.954755] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.547s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.956582] env[62974]: INFO nova.compute.claims [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.982629] env[62974]: INFO nova.scheduler.client.report [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Deleted allocations for instance 366b5816-a847-48d1-ad03-5758e473a9d0 [ 938.004771] env[62974]: DEBUG oslo_concurrency.lockutils [None req-cd269816-f823-4a03-907b-553701a58c46 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.648s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.144161] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654729, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.200079} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.144426] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 938.145218] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0142c0a2-3754-4b8c-8215-342659908343 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.167114] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4/b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.167793] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53e0613c-a31d-4373-a2df-e7953b2e2ef4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.187320] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 938.187320] env[62974]: value = "task-2654730" [ 938.187320] env[62974]: _type = "Task" [ 938.187320] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.195458] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654730, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.198472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.198472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.198623] env[62974]: DEBUG nova.compute.manager [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.199495] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f154b9a3-0860-4115-83dd-36258acc2dac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.209350] env[62974]: DEBUG nova.compute.manager [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 938.210132] env[62974]: DEBUG nova.objects.instance [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'flavor' on Instance uuid cc7c25b5-1463-4eab-8d8f-f812d4f16c34 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.215453] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654728, 'name': Destroy_Task, 'duration_secs': 0.57805} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.215998] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Destroyed the VM [ 938.216428] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 938.217122] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b557a482-3387-44eb-8406-823c2a24d08b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.222586] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 938.222586] env[62974]: value = "task-2654731" [ 938.222586] env[62974]: _type = "Task" [ 938.222586] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.232686] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654731, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.492345] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9278eeff-15df-4568-af6a-8b8ba3d0c31d tempest-ServersNegativeTestJSON-1132372439 tempest-ServersNegativeTestJSON-1132372439-project-member] Lock "366b5816-a847-48d1-ad03-5758e473a9d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.975s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.699162] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.733695] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654731, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.202156] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654730, 'name': ReconfigVM_Task, 'duration_secs': 0.883282} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.205723] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Reconfigured VM instance instance-00000055 to attach disk [datastore2] b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4/b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 939.209488] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab0549ae-aeff-4270-a528-4670b1c08bd3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.218453] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 939.218453] env[62974]: value = "task-2654732" [ 939.218453] env[62974]: _type = "Task" [ 939.218453] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.224276] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.224771] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39e8dcfc-d5c7-4b48-a4f1-c6c6a53c98b3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.233735] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654732, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.242650] env[62974]: DEBUG oslo_vmware.api [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 939.242650] env[62974]: value = "task-2654733" [ 939.242650] env[62974]: _type = "Task" [ 939.242650] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.243018] env[62974]: DEBUG oslo_vmware.api [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654731, 'name': RemoveSnapshot_Task, 'duration_secs': 0.515484} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.243379] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 939.243614] env[62974]: INFO nova.compute.manager [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Took 14.51 seconds to snapshot the instance on the hypervisor. [ 939.257391] env[62974]: DEBUG oslo_vmware.api [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654733, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.306755] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e4c09b-7e4c-443c-a9fb-ac29e8e4d5b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.321185] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007db6dc-b700-4d9a-b75f-9f29d12b4358 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.358478] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6042d4d5-fb26-4d52-aefa-f16d1b6f87ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.368021] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b9baab-8cee-4c0d-844a-ff155d6e6e6c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.383078] env[62974]: DEBUG nova.compute.provider_tree [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.731272] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654732, 'name': Rename_Task, 'duration_secs': 0.166251} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.731883] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 939.732446] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de8e8efb-296a-4a18-93b9-902ed8f66064 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.741044] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 939.741044] env[62974]: value = "task-2654734" [ 939.741044] env[62974]: _type = "Task" [ 939.741044] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.757330] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654734, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.763238] env[62974]: DEBUG oslo_vmware.api [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654733, 'name': PowerOffVM_Task, 'duration_secs': 0.199498} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.763704] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.763941] env[62974]: DEBUG nova.compute.manager [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.765316] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c14add-8981-48e7-bbc4-0555dc23d6cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.819886] env[62974]: DEBUG nova.compute.manager [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Found 3 images (rotation: 2) {{(pid=62974) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 939.820436] env[62974]: DEBUG nova.compute.manager [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Rotating out 1 backups {{(pid=62974) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 939.820436] env[62974]: DEBUG nova.compute.manager [None req-5a5a7fc2-eb70-4923-9713-4b4d11a5c688 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleting image 5d681a90-3310-451a-8a9f-42285b699971 {{(pid=62974) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 939.885793] env[62974]: DEBUG nova.scheduler.client.report [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.250955] env[62974]: DEBUG oslo_vmware.api [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654734, 'name': PowerOnVM_Task, 'duration_secs': 0.50686} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.251334] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 940.251443] env[62974]: INFO nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Took 7.65 seconds to spawn the instance on the hypervisor. [ 940.251617] env[62974]: DEBUG nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.252388] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b993d5-2b25-4278-8530-d4a7037937a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.277663] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4107779c-4ca6-4564-a34e-05a140c93086 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.079s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.391092] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.391652] env[62974]: DEBUG nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 940.394345] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.753s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.395816] env[62974]: INFO nova.compute.claims [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.508967] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.509321] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.601415] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.601730] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.602032] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.602208] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
940.602388] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.605059] env[62974]: INFO nova.compute.manager [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Terminating instance [ 940.768517] env[62974]: INFO nova.compute.manager [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Took 28.58 seconds to build instance. [ 940.900113] env[62974]: DEBUG nova.compute.utils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 940.903312] env[62974]: DEBUG nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 940.903488] env[62974]: DEBUG nova.network.neutron [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 940.943797] env[62974]: DEBUG nova.policy [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '862aa8801fca473dbfb437eba485cdf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91fca1731aab4207a0f333e5d6f630f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 941.012397] env[62974]: INFO nova.compute.manager [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Detaching volume 63d19e7f-b8da-4842-a976-78d65b2d6e22 [ 941.061605] env[62974]: INFO nova.virt.block_device [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Attempting to driver detach volume 63d19e7f-b8da-4842-a976-78d65b2d6e22 from mountpoint /dev/sdb [ 941.061838] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db 
tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 941.062055] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535418', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'name': 'volume-63d19e7f-b8da-4842-a976-78d65b2d6e22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c1d0b90c-aa1c-485d-850d-a1495feac7c9', 'attached_at': '', 'detached_at': '', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'serial': '63d19e7f-b8da-4842-a976-78d65b2d6e22'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 941.062951] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1f1b2c-bdc6-4a2c-8c21-611240516ceb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.087967] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed90d90-8a8f-4927-a1f8-699e01336b58 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.095078] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b5ea4a-4d1a-43ad-b2f8-d6ea4eedffa3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.117415] env[62974]: DEBUG nova.compute.manager [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 941.117634] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.118507] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a10c683-bf2d-414a-b176-08c0077e82b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.121717] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac142c33-d25f-4b3b-b679-6721d2cf8a29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.128896] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.139564] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b125a85-d288-4b29-be62-b403a3ef3270 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.141634] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] The volume has not been displaced from its original location: [datastore1] volume-63d19e7f-b8da-4842-a976-78d65b2d6e22/volume-63d19e7f-b8da-4842-a976-78d65b2d6e22.vmdk. No consolidation needed. {{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 941.147351] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 941.147723] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55152a60-4538-4a74-a7ea-b33632480efb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.167376] env[62974]: DEBUG oslo_vmware.api [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 941.167376] env[62974]: value = "task-2654736" [ 941.167376] env[62974]: _type = "Task" [ 941.167376] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.178026] env[62974]: DEBUG oslo_vmware.api [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654736, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.226870] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.226870] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.226870] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleting the datastore file [datastore2] cc7c25b5-1463-4eab-8d8f-f812d4f16c34 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.226870] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-444ee4bf-0b6a-4a98-a3f7-42ef95bbf21b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.233192] env[62974]: DEBUG oslo_vmware.api [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 941.233192] env[62974]: value = "task-2654737" [ 941.233192] env[62974]: _type = "Task" [ 941.233192] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.241657] env[62974]: DEBUG oslo_vmware.api [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654737, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.242468] env[62974]: DEBUG nova.network.neutron [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Successfully created port: ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.270251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7adcfb0d-9e2f-4ac7-9375-cf2a79954e04 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.093s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.404560] env[62974]: DEBUG nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 941.679303] env[62974]: DEBUG oslo_vmware.api [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654736, 'name': ReconfigVM_Task, 'duration_secs': 0.234407} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.679303] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 941.685941] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6df1bda4-4d69-4d0f-aedb-1ceeef15d8e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.702396] env[62974]: DEBUG oslo_vmware.api [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 941.702396] env[62974]: value = "task-2654738" [ 941.702396] env[62974]: _type = "Task" [ 941.702396] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.712992] env[62974]: DEBUG oslo_vmware.api [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654738, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.738732] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c922842-9519-45ee-bc01-692a70266ce5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.747447] env[62974]: DEBUG oslo_vmware.api [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167008} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.749480] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.749738] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.749946] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.750256] env[62974]: INFO nova.compute.manager [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Took 0.63 seconds to destroy the instance on the hypervisor. [ 941.750569] env[62974]: DEBUG oslo.service.loopingcall [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.750840] env[62974]: DEBUG nova.compute.manager [-] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.751064] env[62974]: DEBUG nova.network.neutron [-] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.754121] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a26edeb-03b2-44ce-9943-558a7ebfaae1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.786336] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a83387-7bf7-4cca-8650-59e3d4e7e9fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.794132] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555d404e-aaa7-434d-a6bb-9085de31c303 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.806756] env[62974]: DEBUG nova.compute.provider_tree [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.887329] env[62974]: DEBUG 
nova.compute.manager [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 941.888241] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44725604-a243-47a3-b136-a869262d2394 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.000081] env[62974]: DEBUG nova.compute.manager [req-230426ec-8c34-4332-8007-c414eb304cb1 req-1de48ab9-e115-4eb1-b1c9-582b7da50dfe service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Received event network-vif-deleted-c027407f-1e07-4406-a05e-d7820d25a2bf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 942.000396] env[62974]: INFO nova.compute.manager [req-230426ec-8c34-4332-8007-c414eb304cb1 req-1de48ab9-e115-4eb1-b1c9-582b7da50dfe service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Neutron deleted interface c027407f-1e07-4406-a05e-d7820d25a2bf; detaching it from the instance and deleting it from the info cache [ 942.000506] env[62974]: DEBUG nova.network.neutron [req-230426ec-8c34-4332-8007-c414eb304cb1 req-1de48ab9-e115-4eb1-b1c9-582b7da50dfe service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.119453] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "55229db9-9442-4973-a1f2-7762227167a4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.119724] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.119905] env[62974]: DEBUG nova.compute.manager [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 942.120885] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6025f3c3-4a94-47a8-bb2c-d63025598ac1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.127582] env[62974]: DEBUG nova.compute.manager [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3404}} [ 942.128175] env[62974]: DEBUG nova.objects.instance [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'flavor' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.212612] env[62974]: DEBUG oslo_vmware.api [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654738, 'name': ReconfigVM_Task, 'duration_secs': 0.147298} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.212970] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535418', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'name': 'volume-63d19e7f-b8da-4842-a976-78d65b2d6e22', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c1d0b90c-aa1c-485d-850d-a1495feac7c9', 'attached_at': '', 'detached_at': '', 'volume_id': '63d19e7f-b8da-4842-a976-78d65b2d6e22', 'serial': '63d19e7f-b8da-4842-a976-78d65b2d6e22'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 942.311205] env[62974]: DEBUG nova.scheduler.client.report [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.399618] env[62974]: INFO nova.compute.manager [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] instance snapshotting [ 942.402310] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907b5137-63f0-4f9b-ad73-80800a854678 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.421237] env[62974]: DEBUG nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 942.424089] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88595cac-f41a-46d3-88ca-d959daa0d56f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.446704] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.446965] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.447153] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.447343] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.447558] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.447758] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 942.447972] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.448148] env[62974]: DEBUG nova.virt.hardware [None 
req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.448318] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.448482] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.448652] env[62974]: DEBUG nova.virt.hardware [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.449481] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641c664f-8ced-4ff0-8004-3fcc23122308 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.457732] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeb2ffc-92b8-47ae-8a43-07b78ae115ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.483367] env[62974]: DEBUG nova.network.neutron [-] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.504058] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9221456a-986e-440c-9dbb-0304a540bdb2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.513173] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffee5014-4fe9-4717-b0d9-daaeace84360 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.546067] env[62974]: DEBUG nova.compute.manager [req-230426ec-8c34-4332-8007-c414eb304cb1 req-1de48ab9-e115-4eb1-b1c9-582b7da50dfe service nova] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Detach interface failed, port_id=c027407f-1e07-4406-a05e-d7820d25a2bf, reason: Instance cc7c25b5-1463-4eab-8d8f-f812d4f16c34 could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 942.720706] env[62974]: DEBUG nova.network.neutron [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Successfully updated port: ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 942.760088] env[62974]: DEBUG nova.objects.instance [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.816047] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.816646] env[62974]: DEBUG nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 942.819745] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.620s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.819905] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.823341] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.434s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.824104] env[62974]: INFO nova.compute.claims [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.848583] env[62974]: INFO nova.scheduler.client.report [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleted 
allocations for instance 226f3328-e3b1-4ae1-8b7c-349b552cf5a2 [ 942.934324] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 942.934689] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-010c2743-4f52-4a5d-ac23-e3977d77e343 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.942789] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 942.942789] env[62974]: value = "task-2654739" [ 942.942789] env[62974]: _type = "Task" [ 942.942789] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.951083] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654739, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.985220] env[62974]: INFO nova.compute.manager [-] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Took 1.23 seconds to deallocate network for instance. [ 943.140784] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.141090] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2669f871-3338-4712-bdc4-71427b17f4fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.148781] env[62974]: DEBUG oslo_vmware.api [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 943.148781] env[62974]: value = "task-2654740" [ 943.148781] env[62974]: _type = "Task" [ 943.148781] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.157219] env[62974]: DEBUG oslo_vmware.api [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654740, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.225571] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.225756] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquired lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.225977] env[62974]: DEBUG nova.network.neutron [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.329346] env[62974]: DEBUG nova.compute.utils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.334689] env[62974]: DEBUG nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 943.334689] env[62974]: DEBUG nova.network.neutron [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 943.356500] env[62974]: DEBUG oslo_concurrency.lockutils [None req-09065d5f-973d-4f59-8494-f9da5687d587 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "226f3328-e3b1-4ae1-8b7c-349b552cf5a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.562s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.379169] env[62974]: DEBUG nova.policy [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 943.453018] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654739, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.493809] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.655646] env[62974]: DEBUG nova.network.neutron [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Successfully created port: 7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.663637] env[62974]: DEBUG oslo_vmware.api [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654740, 'name': PowerOffVM_Task, 'duration_secs': 0.223059} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.663637] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.663749] env[62974]: DEBUG nova.compute.manager [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.664779] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38ab83b-dcf4-418f-b7ec-afcebaf5e8b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.705163] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.769969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b89248ce-c8bc-48bc-ab27-6d1757c0f4db tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.261s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.771182] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.066s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.771375] env[62974]: DEBUG nova.compute.manager [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.772460] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7bebce-2070-406c-8f0a-d880c4a35f23 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.776331] env[62974]: DEBUG nova.network.neutron [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.783301] env[62974]: DEBUG nova.compute.manager [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 943.783987] env[62974]: DEBUG nova.objects.instance [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.834214] env[62974]: DEBUG nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 943.958693] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654739, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.993441] env[62974]: DEBUG nova.network.neutron [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Updating instance_info_cache with network_info: [{"id": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "address": "fa:16:3e:93:b0:61", "network": {"id": "3d204a06-2895-4d42-897a-0b36774f6e9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-704885625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fca1731aab4207a0f333e5d6f630f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccb1f4c6-57", "ovs_interfaceid": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.039917] env[62974]: DEBUG nova.compute.manager [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Received event network-vif-plugged-ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 {{(pid=62974) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11648}} [ 944.039917] env[62974]: DEBUG oslo_concurrency.lockutils [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] Acquiring lock "ef54d01a-5d2c-448a-a060-37520de396ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.040382] env[62974]: DEBUG oslo_concurrency.lockutils [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] Lock "ef54d01a-5d2c-448a-a060-37520de396ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.040523] env[62974]: DEBUG oslo_concurrency.lockutils [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] Lock "ef54d01a-5d2c-448a-a060-37520de396ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.040761] env[62974]: DEBUG nova.compute.manager [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] No waiting events found dispatching network-vif-plugged-ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 944.040944] env[62974]: WARNING nova.compute.manager [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Received unexpected event network-vif-plugged-ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 for instance with vm_state building and task_state spawning. [ 944.041314] env[62974]: DEBUG nova.compute.manager [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Received event network-changed-ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 944.041517] env[62974]: DEBUG nova.compute.manager [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Refreshing instance network info cache due to event network-changed-ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 944.041691] env[62974]: DEBUG oslo_concurrency.lockutils [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] Acquiring lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.145359] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11b2bb8-d574-46a1-9991-c5e0f3593f3b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.153147] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7b606c-01d6-411e-931d-330537ebb601 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.192021] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26efc6a-2fcb-4bf2-8998-81095f5b3e27 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.193729] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9eb28795-13b5-4ac9-81ec-71d400461d10 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.074s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.200176] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a8f0a5-ae29-4343-b56e-34402b7a2dbd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.214974] env[62974]: DEBUG nova.compute.provider_tree [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.457316] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654739, 'name': CreateSnapshot_Task, 'duration_secs': 1.184365} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.457662] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 944.458448] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f7d47c-3cff-4594-b77a-21a0b1b4c81f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.499014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Releasing lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.499469] env[62974]: DEBUG nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Instance network_info: |[{"id": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "address": "fa:16:3e:93:b0:61", "network": {"id": "3d204a06-2895-4d42-897a-0b36774f6e9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-704885625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fca1731aab4207a0f333e5d6f630f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccb1f4c6-57", "ovs_interfaceid": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 944.499968] env[62974]: DEBUG oslo_concurrency.lockutils [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] Acquired lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.500175] env[62974]: DEBUG nova.network.neutron [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Refreshing network info cache for port ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.501419] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] 
[instance: ef54d01a-5d2c-448a-a060-37520de396ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:b0:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2153f70-3d14-42ab-8bb3-be78296dd3b8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.508893] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Creating folder: Project (91fca1731aab4207a0f333e5d6f630f5). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 944.510552] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f32c2a7c-eca2-4b54-9978-93c8da8ad221 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.521893] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Created folder: Project (91fca1731aab4207a0f333e5d6f630f5) in parent group-v535199. [ 944.522089] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Creating folder: Instances. Parent ref: group-v535443. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 944.522402] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d58de90-9857-4c64-9a10-a82498f034bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.530710] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Created folder: Instances in parent group-v535443. [ 944.530911] env[62974]: DEBUG oslo.service.loopingcall [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.531021] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.531212] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48e4193d-c838-461f-899e-d4bc6af9c31e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.550288] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.550288] env[62974]: value = "task-2654743" [ 944.550288] env[62974]: _type = "Task" [ 944.550288] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.557744] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654743, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.718485] env[62974]: DEBUG nova.scheduler.client.report [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.793819] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.794163] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efa16c34-27d0-4b66-affb-70ae228e39ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.801620] env[62974]: DEBUG oslo_vmware.api [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 944.801620] env[62974]: value = "task-2654744" [ 944.801620] env[62974]: _type = "Task" [ 944.801620] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.811675] env[62974]: DEBUG oslo_vmware.api [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.848832] env[62974]: DEBUG nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 944.889186] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.889480] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.889643] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.889823] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.889966] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.890136] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.890347] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.890506] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 944.890689] 
env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.890915] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.891168] env[62974]: DEBUG nova.virt.hardware [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.892140] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbf849c-6996-4e72-91a8-4f4f5a71bf89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.899879] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b147f8f-fa54-4c17-b9bf-5ab12e80a2ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.926024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "0f19241f-1650-41e5-8fe8-828024bf6aaa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.926253] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.980085] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 944.980552] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.981216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 
tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.982983] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3ec783a8-e6f2-429b-a1a9-471de189e8cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.991741] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 944.991741] env[62974]: value = "task-2654745" [ 944.991741] env[62974]: _type = "Task" [ 944.991741] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.002890] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654745, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.060879] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654743, 'name': CreateVM_Task, 'duration_secs': 0.360056} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.060879] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 945.061138] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.061138] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.061404] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 945.061691] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-000d9b9a-8090-4430-811c-ef310d08593f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.067908] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the 
task: (returnval){ [ 945.067908] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52665b24-6d47-9c0f-c673-a2e4b061d9a8" [ 945.067908] env[62974]: _type = "Task" [ 945.067908] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.076015] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52665b24-6d47-9c0f-c673-a2e4b061d9a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.223261] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.223782] env[62974]: DEBUG nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.228822] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.250s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.230288] env[62974]: INFO nova.compute.claims [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.312655] env[62974]: DEBUG oslo_vmware.api [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654744, 'name': PowerOffVM_Task, 'duration_secs': 0.18738} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.312890] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.313091] env[62974]: DEBUG nova.compute.manager [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.313863] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0baa942-bae1-4ed7-9bc3-d108168b8974 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.342035] env[62974]: DEBUG nova.network.neutron [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Updated VIF entry in instance network info cache for port ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.342528] env[62974]: DEBUG nova.network.neutron [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Updating instance_info_cache with network_info: [{"id": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "address": "fa:16:3e:93:b0:61", "network": {"id": "3d204a06-2895-4d42-897a-0b36774f6e9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-704885625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fca1731aab4207a0f333e5d6f630f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccb1f4c6-57", "ovs_interfaceid": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.406905] env[62974]: DEBUG nova.network.neutron [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Successfully updated port: 7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.428308] env[62974]: DEBUG nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 
tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 945.487417] env[62974]: INFO nova.compute.manager [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Detaching volume e582231b-0f13-489f-96dd-9dd8e2561572 [ 945.489576] env[62974]: DEBUG nova.compute.manager [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Stashing vm_state: stopped {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 945.503950] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654745, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.529415] env[62974]: INFO nova.virt.block_device [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Attempting to driver detach volume e582231b-0f13-489f-96dd-9dd8e2561572 from mountpoint /dev/sdb [ 945.529656] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Volume detach. 
Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 945.529842] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535414', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'name': 'volume-e582231b-0f13-489f-96dd-9dd8e2561572', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e23dbff7-d23e-4909-9b33-67ed15c325e7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'serial': 'e582231b-0f13-489f-96dd-9dd8e2561572'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 945.530765] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab35d5f4-4fd3-4a57-87bb-f47f65959c13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.553090] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed11bbf9-d98f-4708-bb39-cc400d58c19c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.561341] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35f7c62-5a54-4b27-b9af-5a778fedc3e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.585522] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5b4805-2c05-431c-b61f-3758074ab1fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.593267] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52665b24-6d47-9c0f-c673-a2e4b061d9a8, 'name': SearchDatastore_Task, 'duration_secs': 0.018211} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.603664] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.603933] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.604194] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.604344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.604522] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.604818] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] The volume has not been displaced from its original location: [datastore2] volume-e582231b-0f13-489f-96dd-9dd8e2561572/volume-e582231b-0f13-489f-96dd-9dd8e2561572.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 945.610144] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfiguring VM instance instance-00000046 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 945.610445] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08fb75d1-cabc-4f71-bd77-dd0056dd16d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.612303] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-606641fc-f65f-4f49-8d33-f985fb652f80 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.634014] env[62974]: DEBUG oslo_vmware.api [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 945.634014] env[62974]: value = "task-2654746" [ 945.634014] env[62974]: _type = "Task" [ 945.634014] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.635180] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.635381] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.641442] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95fc37bd-869f-4488-b9ae-c9c2ab4d2140 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.648842] env[62974]: DEBUG oslo_vmware.api [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654746, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.650088] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 945.650088] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f61a01-43b0-6d89-f6ee-25493deda461" [ 945.650088] env[62974]: _type = "Task" [ 945.650088] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.657437] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f61a01-43b0-6d89-f6ee-25493deda461, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.734480] env[62974]: DEBUG nova.compute.utils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 945.737984] env[62974]: DEBUG nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 945.738136] env[62974]: DEBUG nova.network.neutron [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 945.776589] env[62974]: DEBUG nova.policy [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba02fdedff8346818918b58f4c1dbc05', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '170b4f70ba6341969a71ff316893d640', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 945.825989] env[62974]: DEBUG oslo_concurrency.lockutils [None req-eec00eb3-a1bb-442c-ab53-cbbc1cdfdcf3 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.845839] env[62974]: DEBUG oslo_concurrency.lockutils [req-4905b571-434d-43cb-9534-c8e448606522 req-5bb655bf-001b-4d00-9e33-29b28d1f43ee service nova] Releasing lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.911966] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
945.911966] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.911966] env[62974]: DEBUG nova.network.neutron [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.955033] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.008273] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654745, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.020706] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.088146] env[62974]: DEBUG nova.network.neutron [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Successfully created port: 4bf87ffd-a962-46b0-8d3e-aee290745b3d {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.132261] env[62974]: DEBUG nova.objects.instance [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.145272] env[62974]: DEBUG oslo_vmware.api [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654746, 'name': ReconfigVM_Task, 'duration_secs': 0.268953} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.146218] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Reconfigured VM instance instance-00000046 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 946.152432] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85c86022-6ad0-40c1-aef4-8373b96745b6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.163360] env[62974]: DEBUG nova.compute.manager [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received event network-vif-plugged-7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 946.163455] env[62974]: DEBUG oslo_concurrency.lockutils [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.163654] env[62974]: DEBUG oslo_concurrency.lockutils [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] Lock "18489c02-5958-431f-aede-f554d0d785ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.163817] env[62974]: DEBUG oslo_concurrency.lockutils [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] Lock "18489c02-5958-431f-aede-f554d0d785ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.163974] env[62974]: DEBUG nova.compute.manager [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] No waiting events found dispatching network-vif-plugged-7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 946.164145] env[62974]: WARNING nova.compute.manager [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received unexpected event network-vif-plugged-7a680703-498d-42ed-9269-736752f5f38e for instance with vm_state building and task_state spawning. 
[ 946.164308] env[62974]: DEBUG nova.compute.manager [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received event network-changed-7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 946.164456] env[62974]: DEBUG nova.compute.manager [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Refreshing instance network info cache due to event network-changed-7a680703-498d-42ed-9269-736752f5f38e. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 946.164612] env[62974]: DEBUG oslo_concurrency.lockutils [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] Acquiring lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.174977] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f61a01-43b0-6d89-f6ee-25493deda461, 'name': SearchDatastore_Task, 'duration_secs': 0.009916} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.177442] env[62974]: DEBUG oslo_vmware.api [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 946.177442] env[62974]: value = "task-2654747" [ 946.177442] env[62974]: _type = "Task" [ 946.177442] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.177639] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43eaa1ec-8821-414a-8547-4edaae0ea673 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.186185] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 946.186185] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5277af07-b149-a284-fd38-45aa5481c040" [ 946.186185] env[62974]: _type = "Task" [ 946.186185] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.189535] env[62974]: DEBUG oslo_vmware.api [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654747, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.197324] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5277af07-b149-a284-fd38-45aa5481c040, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.242492] env[62974]: DEBUG nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.450080] env[62974]: DEBUG nova.network.neutron [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.503492] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654745, 'name': CloneVM_Task} progress is 95%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.532969] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aaaa34e-ede9-4b65-bbc7-5fb130d1449e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.540899] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9a95a6-a30c-4dbb-b13f-056c6d2a9025 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.575389] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc3023a-f79b-4244-a223-a962ac99db3b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.584513] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230669c4-4993-4478-a687-81efe23dfb95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.597728] env[62974]: DEBUG nova.compute.provider_tree [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.625036] env[62974]: DEBUG nova.network.neutron [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.636602] env[62974]: DEBUG oslo_concurrency.lockutils [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.636602] env[62974]: DEBUG oslo_concurrency.lockutils [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.636758] env[62974]: DEBUG nova.network.neutron [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.636949] env[62974]: DEBUG nova.objects.instance [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'info_cache' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.690352] env[62974]: DEBUG oslo_vmware.api [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654747, 'name': ReconfigVM_Task, 'duration_secs': 0.150247} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.693248] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535414', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'name': 'volume-e582231b-0f13-489f-96dd-9dd8e2561572', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e23dbff7-d23e-4909-9b33-67ed15c325e7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e582231b-0f13-489f-96dd-9dd8e2561572', 'serial': 'e582231b-0f13-489f-96dd-9dd8e2561572'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 946.700135] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5277af07-b149-a284-fd38-45aa5481c040, 'name': SearchDatastore_Task, 'duration_secs': 0.010026} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.700408] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.700677] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] ef54d01a-5d2c-448a-a060-37520de396ca/ef54d01a-5d2c-448a-a060-37520de396ca.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 946.700930] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4473419d-efde-464f-9f68-8494a51c4c8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.706557] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 946.706557] env[62974]: value = "task-2654748" [ 946.706557] env[62974]: _type = "Task" [ 946.706557] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.714498] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654748, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.003804] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654745, 'name': CloneVM_Task, 'duration_secs': 1.861036} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.004163] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Created linked-clone VM from snapshot [ 947.005126] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e936171-273f-4b09-aa0d-e53b40d83697 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.017130] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Uploading image c9e1cc29-62e0-4d71-8837-c706f5a09e65 {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 947.033885] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 947.034255] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9f4f1ef1-a802-4882-b972-03882a0db5dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.041628] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 947.041628] env[62974]: value = "task-2654749" [ 947.041628] env[62974]: _type = "Task" [ 947.041628] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.050917] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654749, 'name': Destroy_Task} progress is 0%. 
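
The CloneVM_Task completion, "Created linked-clone VM from snapshot", "Uploading image c9e1cc29-...", and the Destroy_Task above trace the snapshot-based image capture sequence: a linked clone is taken from an earlier snapshot, its disk is uploaded, and the temporary clone is destroyed even if the upload fails. A schematic sketch of that ordering; the four callables are hypothetical stand-ins, not nova's real methods.

def capture_image(create_snapshot, clone_from_snapshot, upload_vmdk, destroy_vm):
    """Snapshot the instance, linked-clone it, upload the clone's disk, clean up."""
    snapshot = create_snapshot()              # snapshot of the running instance (precedes the lines shown)
    clone = clone_from_snapshot(snapshot)     # CloneVM_Task -> linked-clone VM
    try:
        return upload_vmdk(clone)             # stream-optimized upload of the clone's disk
    finally:
        destroy_vm(clone)                     # Destroy_Task on the temporary clone

# e.g. capture_image(lambda: "snap", lambda s: "clone", lambda c: "image-id", print)
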
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.101251] env[62974]: DEBUG nova.scheduler.client.report [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.128180] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.128540] env[62974]: DEBUG nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Instance network_info: |[{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 947.128852] env[62974]: DEBUG oslo_concurrency.lockutils [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] Acquired lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.129052] env[62974]: DEBUG nova.network.neutron [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Refreshing network info cache for port 7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.130405] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:d5:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a680703-498d-42ed-9269-736752f5f38e', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.138088] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Creating folder: Project (3f990de0bcb0403195a272efcc0e104c). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.141561] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79fa777e-6bc3-487c-b8b1-b4786f006fc2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.144750] env[62974]: DEBUG nova.objects.base [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 947.156043] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Created folder: Project (3f990de0bcb0403195a272efcc0e104c) in parent group-v535199. [ 947.156043] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Creating folder: Instances. Parent ref: group-v535447. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.156677] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9d74c76-6327-474b-bba8-2c86e2c1df86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.166282] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Created folder: Instances in parent group-v535447. [ 947.166518] env[62974]: DEBUG oslo.service.loopingcall [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
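
The two Folder.CreateFolder invocations above build a per-project folder under group-v535199 and then an Instances folder under the newly created group-v535447, tolerating the case where either already exists. A toy version of that idempotent create, with a set of paths standing in for the vCenter folder tree; the folder names come from the log, the helper itself is hypothetical.

def ensure_folder(existing, parent, name):
    """Return the folder path under `parent`, creating it only if missing."""
    path = f"{parent}/{name}"
    if path not in existing:
        existing.add(path)        # stands in for the Folder.CreateFolder call
    return path

existing = set()
project = ensure_folder(existing, "group-v535199",
                        "Project (3f990de0bcb0403195a272efcc0e104c)")
instances = ensure_folder(existing, project, "Instances")
print(instances)
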
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.166706] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.166906] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a1385f5-ea17-4654-9c0f-8d744740af79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.198828] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.198828] env[62974]: value = "task-2654752" [ 947.198828] env[62974]: _type = "Task" [ 947.198828] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.207224] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654752, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.217703] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469677} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.217703] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] ef54d01a-5d2c-448a-a060-37520de396ca/ef54d01a-5d2c-448a-a060-37520de396ca.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.217703] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.217703] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43b10072-53c0-4bd2-803b-037c48c82673 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.223534] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 947.223534] env[62974]: value = "task-2654753" [ 947.223534] env[62974]: _type = "Task" [ 947.223534] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.232956] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654753, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.246075] env[62974]: DEBUG nova.objects.instance [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lazy-loading 'flavor' on Instance uuid e23dbff7-d23e-4909-9b33-67ed15c325e7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.258016] env[62974]: DEBUG nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 947.295965] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 947.296294] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.296492] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.296662] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.296806] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.296949] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 947.297334] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 947.297607] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 947.297833] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 947.298015] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 947.298225] env[62974]: DEBUG nova.virt.hardware [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 947.302022] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98055e8d-b142-4304-aee5-5e493789d8df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.308170] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d64fe3-4e35-4350-94a4-e2742c8685c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.482996] env[62974]: DEBUG nova.network.neutron [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updated VIF entry in instance network info cache for port 7a680703-498d-42ed-9269-736752f5f38e. 
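
The nova.virt.hardware records above enumerate the (sockets, cores, threads) combinations that can express the flavor's single vCPU under the 65536 per-dimension maxima, then sort them against the preferred topology; for 1 vCPU the only candidate is 1:1:1. A condensed sketch of that enumeration, simplified relative to the actual nova.virt.hardware logic.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """All factorizations of the vCPU count into sockets x cores x threads."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology the log reports
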
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.483433] env[62974]: DEBUG nova.network.neutron [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.551224] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654749, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.606838] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.607480] env[62974]: DEBUG nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Start building networks asynchronously for instance. 
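
The instance_claim whose "compute_resources" lock is released above was evaluated against the provider inventory reported a few records earlier (total, reserved, and allocation_ratio per resource class for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18). The schedulable capacity implied by that payload is (total - reserved) * allocation_ratio; quick arithmetic over the logged values:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)     # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
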
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 947.611063] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.952s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.611415] env[62974]: DEBUG nova.objects.instance [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lazy-loading 'resources' on Instance uuid 32b17ff4-f7e1-498d-aef7-162f81cd5feb {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.682522] env[62974]: DEBUG nova.network.neutron [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Successfully updated port: 4bf87ffd-a962-46b0-8d3e-aee290745b3d {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.711491] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654752, 'name': CreateVM_Task, 'duration_secs': 0.409849} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.711491] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.712069] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.712240] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.712759] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.712846] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6a8410c-b0f7-49a8-b3d2-2e4912177dbb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.718298] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 947.718298] env[62974]: value = 
"session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ffee07-a855-1c49-f47a-10456f984a0a" [ 947.718298] env[62974]: _type = "Task" [ 947.718298] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.729347] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ffee07-a855-1c49-f47a-10456f984a0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.735841] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654753, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162478} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.736095] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.736828] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd82ae40-0254-4571-9b8e-8cd8ed131098 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.759836] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] ef54d01a-5d2c-448a-a060-37520de396ca/ef54d01a-5d2c-448a-a060-37520de396ca.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.761314] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abced2c7-a542-4dc9-b375-ee31f21bbd0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.776155] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4fbadb22-eff1-4da1-b3f8-f374d0231dfd tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 2.795s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.784587] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 947.784587] env[62974]: value = "task-2654754" [ 947.784587] env[62974]: _type = "Task" [ 947.784587] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.792943] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654754, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.892810] env[62974]: DEBUG nova.network.neutron [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating instance_info_cache with network_info: [{"id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "address": "fa:16:3e:9f:c5:d4", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ef50dc0-ed", "ovs_interfaceid": "5ef50dc0-edb6-41e4-b27b-22e996c326b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.986155] env[62974]: DEBUG oslo_concurrency.lockutils [req-acf19b0f-57e0-4623-9e05-3828bf8c5afa req-5e8ee220-5a23-4745-8f74-6b6fe109d8ff service nova] Releasing lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.051193] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654749, 'name': Destroy_Task, 'duration_secs': 0.858849} completed successfully. 
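
The instance_info_cache entry above is a list of VIF dicts: port 5ef50dc0-edb6-41e4-b27b-22e996c326b4 carries fixed address 192.168.128.6 with floating IP 10.180.180.249 nested under it. A small helper that walks that structure; the sample below reproduces only the fields the helper touches.

def extract_ips(network_info):
    """Collect fixed and floating addresses from a network_info list."""
    fixed, floating = [], []
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return fixed, floating

network_info = [{
    "id": "5ef50dc0-edb6-41e4-b27b-22e996c326b4",
    "network": {"subnets": [{"ips": [{
        "address": "192.168.128.6",
        "floating_ips": [{"address": "10.180.180.249"}],
    }]}]},
}]
print(extract_ips(network_info))  # (['192.168.128.6'], ['10.180.180.249'])
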
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.051460] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Destroyed the VM [ 948.051702] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 948.051957] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2d3cefbc-96f0-47d7-bca1-356bb204ceef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.058035] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 948.058035] env[62974]: value = "task-2654755" [ 948.058035] env[62974]: _type = "Task" [ 948.058035] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.065816] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654755, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.116770] env[62974]: DEBUG nova.compute.utils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 948.125737] env[62974]: DEBUG nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 948.126034] env[62974]: DEBUG nova.network.neutron [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 948.182339] env[62974]: DEBUG nova.policy [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '989d16ec0ddb48db9797c50907d1a76d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f24d5c2ca88c401c8ea9c0ba1ee445e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 948.184333] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "refresh_cache-92c80524-0fb6-4f28-9a72-bc4ab5793558" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.184468] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquired lock "refresh_cache-92c80524-0fb6-4f28-9a72-bc4ab5793558" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.184607] env[62974]: DEBUG nova.network.neutron [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.230997] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ffee07-a855-1c49-f47a-10456f984a0a, 'name': SearchDatastore_Task, 'duration_secs': 0.009429} completed successfully. 
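
The "Policy check for network:attach_external_network failed" record above is an oslo.policy-style rule evaluation against the request credentials, which show is_admin False and only the member/reader roles. A toy check in the same spirit; the rule shown (admin required) is an assumption, since the log prints only the credentials and the failure, and the real engine is oslo.policy rather than this function.

def check_attach_external_network(credentials):
    """Hypothetical rule: attaching an external network requires admin."""
    return credentials.get("is_admin", False) or "admin" in credentials.get("roles", [])

credentials = {"is_admin": False, "roles": ["member", "reader"]}
print(check_attach_external_network(credentials))   # False -> the check fails, as logged
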
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.231907] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.231907] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.231907] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.231907] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.232134] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.232256] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2aa2c52a-c642-475a-b1c9-ca3e522366cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.240977] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.241214] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.241931] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4c7ef71-c569-4db7-a03b-bb199c6ea7f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.246839] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 948.246839] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e7a9f6-1a41-2cf9-96ab-5a04ba1cbe56" [ 948.246839] env[62974]: _type = "Task" [ 948.246839] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.259440] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e7a9f6-1a41-2cf9-96ab-5a04ba1cbe56, 'name': SearchDatastore_Task} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.260147] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac882fae-4214-417e-a5c8-48db35d641b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.267084] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 948.267084] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52be7948-46e0-da52-e296-92a8b0ae1040" [ 948.267084] env[62974]: _type = "Task" [ 948.267084] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.274319] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52be7948-46e0-da52-e296-92a8b0ae1040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.296036] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654754, 'name': ReconfigVM_Task, 'duration_secs': 0.296016} completed successfully. 
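
The SearchDatastore_Task / MakeDirectory sequence above is the image-cache check for image 807f8582-499f-47ee-9d5b-755c9f39bc39: look for the base VMDK under devstack-image-cache_base, create the cache folder if it is missing, and later copy the cached disk into the instance's own directory (the CopyVirtualDisk_Task that follows a few records later). A local-filesystem sketch of the same fetch-once-then-copy pattern; the paths and the fetch callable are illustrative, not the datastore API.

import shutil
from pathlib import Path

def ensure_cached_image(cache_dir: Path, image_id: str, fetch):
    """Return the cached VMDK path, downloading it only if missing."""
    vmdk = cache_dir / image_id / f"{image_id}.vmdk"
    if not vmdk.exists():                 # SearchDatastore_Task miss
        vmdk.parent.mkdir(parents=True, exist_ok=True)
        fetch(vmdk)                       # download from the image service (stubbed)
    return vmdk

def spawn_disk(cache_dir: Path, image_id: str, instance_dir: Path, fetch):
    src = ensure_cached_image(cache_dir, image_id, fetch)
    instance_dir.mkdir(parents=True, exist_ok=True)
    dst = instance_dir / f"{instance_dir.name}.vmdk"
    shutil.copyfile(src, dst)             # CopyVirtualDisk_Task equivalent
    return dst

# usage with a stub fetch that just writes an empty file:
# spawn_disk(Path("/tmp/image-cache"), "807f8582-499f-47ee-9d5b-755c9f39bc39",
#            Path("/tmp/instance-dir"), lambda p: p.write_bytes(b""))
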
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.296327] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Reconfigured VM instance instance-00000056 to attach disk [datastore1] ef54d01a-5d2c-448a-a060-37520de396ca/ef54d01a-5d2c-448a-a060-37520de396ca.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.296952] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96fdb349-7af2-4422-8994-3cf812f40712 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.303276] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 948.303276] env[62974]: value = "task-2654756" [ 948.303276] env[62974]: _type = "Task" [ 948.303276] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.317632] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654756, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.337227] env[62974]: DEBUG nova.compute.manager [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Received event network-vif-plugged-4bf87ffd-a962-46b0-8d3e-aee290745b3d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 948.337500] env[62974]: DEBUG oslo_concurrency.lockutils [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] Acquiring lock "92c80524-0fb6-4f28-9a72-bc4ab5793558-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.337748] env[62974]: DEBUG oslo_concurrency.lockutils [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] Lock "92c80524-0fb6-4f28-9a72-bc4ab5793558-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.337927] env[62974]: DEBUG oslo_concurrency.lockutils [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] Lock "92c80524-0fb6-4f28-9a72-bc4ab5793558-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.338288] env[62974]: DEBUG nova.compute.manager [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] No waiting events found dispatching 
network-vif-plugged-4bf87ffd-a962-46b0-8d3e-aee290745b3d {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.340038] env[62974]: WARNING nova.compute.manager [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Received unexpected event network-vif-plugged-4bf87ffd-a962-46b0-8d3e-aee290745b3d for instance with vm_state building and task_state spawning. [ 948.340038] env[62974]: DEBUG nova.compute.manager [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Received event network-changed-4bf87ffd-a962-46b0-8d3e-aee290745b3d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 948.340038] env[62974]: DEBUG nova.compute.manager [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Refreshing instance network info cache due to event network-changed-4bf87ffd-a962-46b0-8d3e-aee290745b3d. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 948.340038] env[62974]: DEBUG oslo_concurrency.lockutils [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] Acquiring lock "refresh_cache-92c80524-0fb6-4f28-9a72-bc4ab5793558" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.399686] env[62974]: DEBUG oslo_concurrency.lockutils [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "refresh_cache-c1d0b90c-aa1c-485d-850d-a1495feac7c9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.454506] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33a3495-e37a-4e8d-9a41-de34ab15e0a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.462305] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe2dda0-b8d3-4381-97cc-0d7b6fa62c67 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.500107] env[62974]: DEBUG nova.network.neutron [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Successfully created port: 3f524d04-205f-4f35-a868-33609b02e1ac {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.502609] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b938b78-1ada-487f-abd6-d822307da0b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.510572] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a02032-adf8-4d03-bb94-4f2b10012d63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.526010] env[62974]: DEBUG nova.compute.provider_tree [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 
tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.571343] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654755, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.626119] env[62974]: DEBUG nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 948.631060] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.631060] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.631060] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "e23dbff7-d23e-4909-9b33-67ed15c325e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.631266] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.631398] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.633566] env[62974]: INFO nova.compute.manager [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 
tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Terminating instance [ 948.733475] env[62974]: DEBUG nova.network.neutron [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.777644] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52be7948-46e0-da52-e296-92a8b0ae1040, 'name': SearchDatastore_Task, 'duration_secs': 0.010847} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.777967] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.778179] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 18489c02-5958-431f-aede-f554d0d785ed/18489c02-5958-431f-aede-f554d0d785ed.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 948.778547] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-856acb93-ac47-4025-8cf4-1e7ae6ec2ce9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.788398] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 948.788398] env[62974]: value = "task-2654757" [ 948.788398] env[62974]: _type = "Task" [ 948.788398] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.796661] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654757, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.814651] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654756, 'name': Rename_Task, 'duration_secs': 0.161449} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.818013] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.818299] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-985f8b36-7328-4689-a404-0c89066e85c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.825211] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 948.825211] env[62974]: value = "task-2654758" [ 948.825211] env[62974]: _type = "Task" [ 948.825211] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.834575] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.887696] env[62974]: DEBUG nova.network.neutron [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Updating instance_info_cache with network_info: [{"id": "4bf87ffd-a962-46b0-8d3e-aee290745b3d", "address": "fa:16:3e:f9:29:25", "network": {"id": "a534af09-85c2-4f50-8475-6c022d174ed8", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2045977775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "170b4f70ba6341969a71ff316893d640", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bf87ffd-a9", "ovs_interfaceid": "4bf87ffd-a962-46b0-8d3e-aee290745b3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.029831] env[62974]: DEBUG nova.scheduler.client.report [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.068389] env[62974]: DEBUG oslo_vmware.api [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654755, 'name': RemoveSnapshot_Task, 'duration_secs': 0.800358} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.068678] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 949.137537] env[62974]: DEBUG nova.compute.manager [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 949.137776] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.138716] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3308addf-e0e0-4f02-b4d0-7f7362b24f96 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.147912] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.148200] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9b7915c-0e52-45a1-b0f7-58b4c27f9c7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.155883] env[62974]: DEBUG oslo_vmware.api [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 949.155883] env[62974]: value = "task-2654759" [ 949.155883] env[62974]: _type = "Task" [ 949.155883] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.165087] env[62974]: DEBUG oslo_vmware.api [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654759, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.299654] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654757, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.335807] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654758, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.390631] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Releasing lock "refresh_cache-92c80524-0fb6-4f28-9a72-bc4ab5793558" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.391030] env[62974]: DEBUG nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Instance network_info: |[{"id": "4bf87ffd-a962-46b0-8d3e-aee290745b3d", "address": "fa:16:3e:f9:29:25", "network": {"id": "a534af09-85c2-4f50-8475-6c022d174ed8", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2045977775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "170b4f70ba6341969a71ff316893d640", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bf87ffd-a9", "ovs_interfaceid": "4bf87ffd-a962-46b0-8d3e-aee290745b3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 949.392419] env[62974]: DEBUG oslo_concurrency.lockutils [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] Acquired lock "refresh_cache-92c80524-0fb6-4f28-9a72-bc4ab5793558" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.392741] env[62974]: DEBUG nova.network.neutron [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Refreshing network info cache for port 4bf87ffd-a962-46b0-8d3e-aee290745b3d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.394125] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:29:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bf87ffd-a962-46b0-8d3e-aee290745b3d', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.402038] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Creating folder: Project (170b4f70ba6341969a71ff316893d640). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.406455] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-712e22d0-b910-4926-bd30-99f007fbfbe8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.409815] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.410718] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08802d49-2ab8-456f-b7f5-0ff48313d59e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.417889] env[62974]: DEBUG oslo_vmware.api [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 949.417889] env[62974]: value = "task-2654761" [ 949.417889] env[62974]: _type = "Task" [ 949.417889] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.422729] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Created folder: Project (170b4f70ba6341969a71ff316893d640) in parent group-v535199. [ 949.423428] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Creating folder: Instances. Parent ref: group-v535450. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.423557] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-493e4e55-a029-439d-b450-2523de442809 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.428854] env[62974]: DEBUG oslo_vmware.api [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654761, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.437066] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Created folder: Instances in parent group-v535450. [ 949.437661] env[62974]: DEBUG oslo.service.loopingcall [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.437755] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.437976] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e49f0c1-97c8-41ad-88ec-d4ff366ae608 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.459760] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.459760] env[62974]: value = "task-2654763" [ 949.459760] env[62974]: _type = "Task" [ 949.459760] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.468911] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654763, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.536446] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.925s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.539691] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.435s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.540190] env[62974]: DEBUG nova.objects.instance [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lazy-loading 'resources' on Instance uuid 59ece0e8-85c2-499d-aba2-fd45fc116013 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.573083] env[62974]: INFO nova.scheduler.client.report [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Deleted allocations for instance 32b17ff4-f7e1-498d-aef7-162f81cd5feb [ 949.578545] env[62974]: WARNING nova.compute.manager [None req-52705b61-9eb2-4f76-a3af-eb4530000387 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Image not found during snapshot: 
nova.exception.ImageNotFound: Image c9e1cc29-62e0-4d71-8837-c706f5a09e65 could not be found. [ 949.636941] env[62974]: DEBUG nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 949.666024] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 949.666388] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.666616] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 949.666870] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.667127] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 949.667333] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 949.667601] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 949.667818] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 949.668053] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 949.668264] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 949.668490] env[62974]: DEBUG nova.virt.hardware [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 949.669766] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0348d370-16fb-4b45-8527-18a41b9b0b1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.675823] env[62974]: DEBUG oslo_vmware.api [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654759, 'name': PowerOffVM_Task, 'duration_secs': 0.426784} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.676528] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 949.676750] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.677067] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3260d269-dc9a-4c03-98d7-ca0873681a6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.682820] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13669def-4132-46ee-89fa-0d31a24b9e48 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.774113] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.774649] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.774649] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleting the datastore file [datastore1] e23dbff7-d23e-4909-9b33-67ed15c325e7 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.774768] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b77c0d42-38ab-4052-a6ce-af73d080981f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.781283] env[62974]: DEBUG oslo_vmware.api [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 949.781283] env[62974]: value = "task-2654765" [ 949.781283] env[62974]: _type = "Task" [ 949.781283] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.789737] env[62974]: DEBUG oslo_vmware.api [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.798568] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559278} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.800182] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 18489c02-5958-431f-aede-f554d0d785ed/18489c02-5958-431f-aede-f554d0d785ed.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 949.800182] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.800182] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c8dceb9-6e86-477a-8f6b-e087078d2030 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.806593] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 949.806593] env[62974]: value = "task-2654766" [ 949.806593] env[62974]: _type = "Task" [ 949.806593] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.815582] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654766, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.839875] env[62974]: DEBUG oslo_vmware.api [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654758, 'name': PowerOnVM_Task, 'duration_secs': 0.736084} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.840243] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.840497] env[62974]: INFO nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Took 7.42 seconds to spawn the instance on the hypervisor. [ 949.840738] env[62974]: DEBUG nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.841623] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c3c0a1-e529-457e-aa4c-52d630d5e39a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.911178] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.911311] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.911523] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.911707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.911869] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.915034] env[62974]: INFO nova.compute.manager [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Terminating instance [ 949.929826] env[62974]: DEBUG oslo_vmware.api [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654761, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.970983] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654763, 'name': CreateVM_Task, 'duration_secs': 0.497392} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.971206] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 949.971993] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.972234] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.972599] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 949.972900] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a1378cf-7c9d-4c82-97fe-3f01b7cf2ad1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.977547] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 949.977547] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524c0f69-60c9-0235-cab2-e01fa27e0a34" [ 949.977547] env[62974]: _type = "Task" [ 949.977547] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.986256] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524c0f69-60c9-0235-cab2-e01fa27e0a34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.088593] env[62974]: DEBUG oslo_concurrency.lockutils [None req-68d03560-2d31-4dda-a955-1f3eda3281b0 tempest-ServerPasswordTestJSON-1377007362 tempest-ServerPasswordTestJSON-1377007362-project-member] Lock "32b17ff4-f7e1-498d-aef7-162f81cd5feb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.050s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.178259] env[62974]: DEBUG nova.network.neutron [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Successfully updated port: 3f524d04-205f-4f35-a868-33609b02e1ac {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 950.209327] env[62974]: DEBUG nova.network.neutron [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Updated VIF entry in instance network info cache for port 4bf87ffd-a962-46b0-8d3e-aee290745b3d. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.209647] env[62974]: DEBUG nova.network.neutron [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Updating instance_info_cache with network_info: [{"id": "4bf87ffd-a962-46b0-8d3e-aee290745b3d", "address": "fa:16:3e:f9:29:25", "network": {"id": "a534af09-85c2-4f50-8475-6c022d174ed8", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2045977775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "170b4f70ba6341969a71ff316893d640", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bf87ffd-a9", "ovs_interfaceid": "4bf87ffd-a962-46b0-8d3e-aee290745b3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.293546] env[62974]: DEBUG oslo_vmware.api [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: 
{'id': task-2654765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274074} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.293800] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.293983] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.294174] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.294350] env[62974]: INFO nova.compute.manager [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 950.294591] env[62974]: DEBUG oslo.service.loopingcall [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.294778] env[62974]: DEBUG nova.compute.manager [-] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 950.295131] env[62974]: DEBUG nova.network.neutron [-] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 950.318357] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654766, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088167} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.318659] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.319488] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da46efcb-f280-44ed-89b0-ad0c0bc38588 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.343801] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 18489c02-5958-431f-aede-f554d0d785ed/18489c02-5958-431f-aede-f554d0d785ed.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.345091] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d53be9b-2b41-4f99-8822-b2dd93e9ed4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.359673] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be315d5e-c05c-4612-aafd-285f01ced328 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.372140] env[62974]: DEBUG nova.compute.manager [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Received event network-vif-plugged-3f524d04-205f-4f35-a868-33609b02e1ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 950.372363] env[62974]: DEBUG oslo_concurrency.lockutils [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] Acquiring lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.372907] env[62974]: DEBUG oslo_concurrency.lockutils [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.373111] env[62974]: DEBUG oslo_concurrency.lockutils [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.373308] env[62974]: DEBUG nova.compute.manager [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: 
aa6eb55e-79c0-4e1f-8756-05dff97b06d2] No waiting events found dispatching network-vif-plugged-3f524d04-205f-4f35-a868-33609b02e1ac {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 950.373452] env[62974]: WARNING nova.compute.manager [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Received unexpected event network-vif-plugged-3f524d04-205f-4f35-a868-33609b02e1ac for instance with vm_state building and task_state spawning. [ 950.373612] env[62974]: DEBUG nova.compute.manager [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Received event network-changed-3f524d04-205f-4f35-a868-33609b02e1ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 950.373765] env[62974]: DEBUG nova.compute.manager [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Refreshing instance network info cache due to event network-changed-3f524d04-205f-4f35-a868-33609b02e1ac. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 950.373948] env[62974]: DEBUG oslo_concurrency.lockutils [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] Acquiring lock "refresh_cache-aa6eb55e-79c0-4e1f-8756-05dff97b06d2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.374099] env[62974]: DEBUG oslo_concurrency.lockutils [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] Acquired lock "refresh_cache-aa6eb55e-79c0-4e1f-8756-05dff97b06d2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.374270] env[62974]: DEBUG nova.network.neutron [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Refreshing network info cache for port 3f524d04-205f-4f35-a868-33609b02e1ac {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.377719] env[62974]: INFO nova.compute.manager [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Took 30.00 seconds to build instance. [ 950.379772] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 950.379772] env[62974]: value = "task-2654767" [ 950.379772] env[62974]: _type = "Task" [ 950.379772] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.380901] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf6b86f-21eb-4bf8-8232-62c8da61eb38 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.394902] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654767, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.419884] env[62974]: DEBUG nova.compute.manager [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 950.420134] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.421182] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891e93df-012a-4645-a6cd-190b7d6d8528 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.424318] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9955b1d-6041-4f45-adad-7674ac0fbd59 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.433561] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.438028] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7af43436-71c3-43c2-a2ba-b22bafa3af24 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.439448] env[62974]: DEBUG oslo_vmware.api [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654761, 'name': PowerOnVM_Task, 'duration_secs': 0.534651} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.439753] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.440016] env[62974]: DEBUG nova.compute.manager [None req-49237d54-d085-4117-a313-f7a45a5a4bbf tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 950.441244] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af2f8e6-a7f7-4c95-bae9-a65970c323c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.446552] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5daa7c15-7d9d-4f93-b95b-c274e9150c4c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.449486] env[62974]: DEBUG oslo_vmware.api [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 950.449486] env[62974]: value = "task-2654768" [ 950.449486] env[62974]: _type = "Task" [ 950.449486] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.464334] env[62974]: DEBUG nova.compute.provider_tree [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.470435] env[62974]: DEBUG oslo_vmware.api [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654768, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.487506] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524c0f69-60c9-0235-cab2-e01fa27e0a34, 'name': SearchDatastore_Task, 'duration_secs': 0.008374} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.488238] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.488238] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.488437] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.488437] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.488582] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.488805] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4142ee28-735b-42d3-867f-4554f08b2099 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.497993] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.498232] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.498937] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d657e22e-4969-4dda-9dd8-4176e33f4feb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.505074] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 950.505074] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cce56c-9af6-8696-c5df-abd5670257f8" [ 950.505074] env[62974]: _type = "Task" [ 950.505074] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.513253] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cce56c-9af6-8696-c5df-abd5670257f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.685735] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "refresh_cache-aa6eb55e-79c0-4e1f-8756-05dff97b06d2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.713345] env[62974]: DEBUG oslo_concurrency.lockutils [req-c165dafc-fe39-4075-96f8-a4a8b8d18aad req-619d0452-a0fd-48fc-bac3-4cbfdb5afe25 service nova] Releasing lock "refresh_cache-92c80524-0fb6-4f28-9a72-bc4ab5793558" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.882782] env[62974]: DEBUG oslo_concurrency.lockutils [None req-92678e42-7572-4d7f-8ea8-bfe4f5570d88 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.510s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.895125] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654767, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.922998] env[62974]: DEBUG nova.network.neutron [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.964240] env[62974]: DEBUG oslo_vmware.api [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654768, 'name': PowerOffVM_Task, 'duration_secs': 0.197934} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.964240] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.964240] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.964240] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0428f33e-945a-4847-9238-eb05d43ff8de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.972456] env[62974]: DEBUG nova.scheduler.client.report [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.017845] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cce56c-9af6-8696-c5df-abd5670257f8, 'name': SearchDatastore_Task, 'duration_secs': 0.010504} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.019351] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf842b7b-8b66-4d84-a2cc-fe2ae64c1306 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.034036] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 951.034036] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5232b6b5-d4b1-9364-d3db-c04f200889ef" [ 951.034036] env[62974]: _type = "Task" [ 951.034036] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.034580] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.034956] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.035774] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleting the datastore file [datastore2] b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.039550] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55f3f11d-0a72-4639-8df1-e2b1dc45b0ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.046854] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5232b6b5-d4b1-9364-d3db-c04f200889ef, 'name': SearchDatastore_Task, 'duration_secs': 0.010373} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.047146] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.047774] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 92c80524-0fb6-4f28-9a72-bc4ab5793558/92c80524-0fb6-4f28-9a72-bc4ab5793558.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.048069] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5576ad85-e824-4017-916b-bc2d4f789adb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.054511] env[62974]: DEBUG oslo_vmware.api [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for the task: (returnval){ [ 951.054511] env[62974]: value = "task-2654770" [ 951.054511] env[62974]: _type = "Task" [ 951.054511] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.059037] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 951.059037] env[62974]: value = "task-2654771" [ 951.059037] env[62974]: _type = "Task" [ 951.059037] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.065974] env[62974]: DEBUG oslo_vmware.api [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654770, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.070906] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654771, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.151132] env[62974]: DEBUG nova.network.neutron [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.315162] env[62974]: DEBUG nova.network.neutron [-] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.395967] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654767, 'name': ReconfigVM_Task, 'duration_secs': 0.923264} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.395967] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 18489c02-5958-431f-aede-f554d0d785ed/18489c02-5958-431f-aede-f554d0d785ed.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.395967] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52a4ce40-5bd3-4648-8260-351f626765c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.402982] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 951.402982] env[62974]: value = "task-2654772" [ 951.402982] env[62974]: _type = "Task" [ 951.402982] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.412095] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654772, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.478743] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.939s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.481414] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 13.697s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.507865] env[62974]: INFO nova.scheduler.client.report [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Deleted allocations for instance 59ece0e8-85c2-499d-aba2-fd45fc116013 [ 951.572121] env[62974]: DEBUG oslo_vmware.api [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Task: {'id': task-2654770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163019} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.575874] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.576113] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.576327] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.576548] env[62974]: INFO nova.compute.manager [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Took 1.16 seconds to destroy the instance on the hypervisor. 
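The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is N%." and "completed successfully ... duration_secs" entries in this stretch are oslo.vmware's task polling (wait_for_task / _poll_task): the driver polls the vCenter task state at a fixed interval until it reports success or error. The sketch below illustrates only that polling pattern, not the library's actual code; TaskInfo and fetch_task_info are hypothetical stand-ins for the PropertyCollector lookups shown in the log.

    import time

    class TaskInfo:
        """Hypothetical stand-in for a vCenter TaskInfo object."""
        def __init__(self, state, progress=0, error=None):
            self.state = state        # 'queued' | 'running' | 'success' | 'error'
            self.progress = progress  # integer percentage, as in "progress is 14%."
            self.error = error

    def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
        """Poll task_id until it finishes, mirroring the wait_for_task/_poll_task
        entries above.  fetch_task_info is a hypothetical callable; in the real
        driver the task state comes back via PropertyCollector calls on the
        vCenter session."""
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info.state == 'success':
                print("Task: {'id': %s} completed successfully. duration_secs=%.6f"
                      % (task_id, time.monotonic() - start))
                return info
            if info.state == 'error':
                raise RuntimeError('Task %s failed: %s' % (task_id, info.error))
            print("Task: {'id': %s} progress is %d%%." % (task_id, info.progress))
            time.sleep(poll_interval)

    # Example: a fake task that completes on its third poll.
    _states = iter([TaskInfo('running', 0), TaskInfo('running', 14),
                    TaskInfo('success', 100)])
    wait_for_task(lambda _id: next(_states), 'task-2654768', poll_interval=0)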
[ 951.576857] env[62974]: DEBUG oslo.service.loopingcall [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.577373] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654771, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.577588] env[62974]: DEBUG nova.compute.manager [-] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.577683] env[62974]: DEBUG nova.network.neutron [-] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 951.653697] env[62974]: DEBUG oslo_concurrency.lockutils [req-e8569b96-1503-487c-bff6-ac84db5d0da1 req-0dcc3c9e-9774-413c-9527-635a2920225d service nova] Releasing lock "refresh_cache-aa6eb55e-79c0-4e1f-8756-05dff97b06d2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.654121] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquired lock "refresh_cache-aa6eb55e-79c0-4e1f-8756-05dff97b06d2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.654278] env[62974]: DEBUG nova.network.neutron [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 951.704995] env[62974]: DEBUG oslo_concurrency.lockutils [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "ef54d01a-5d2c-448a-a060-37520de396ca" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.705259] env[62974]: DEBUG oslo_concurrency.lockutils [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.705486] env[62974]: INFO nova.compute.manager [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Rebooting instance [ 951.818236] env[62974]: 
INFO nova.compute.manager [-] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Took 1.52 seconds to deallocate network for instance. [ 951.916110] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654772, 'name': Rename_Task, 'duration_secs': 0.23019} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.920806] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.920806] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0da64f33-4864-4054-8d26-2ad83651bf3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.927211] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 951.927211] env[62974]: value = "task-2654773" [ 951.927211] env[62974]: _type = "Task" [ 951.927211] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.933013] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654773, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.020482] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a1871dfb-5dc1-486c-860e-96ae3d8ef471 tempest-VolumesAdminNegativeTest-2067265764 tempest-VolumesAdminNegativeTest-2067265764-project-member] Lock "59ece0e8-85c2-499d-aba2-fd45fc116013" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.778s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.079731] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654771, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520731} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.079992] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 92c80524-0fb6-4f28-9a72-bc4ab5793558/92c80524-0fb6-4f28-9a72-bc4ab5793558.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.080334] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.081602] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78cd2ae9-a56e-4808-ac6e-8af65c477b5f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.095629] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 952.095629] env[62974]: value = "task-2654774" [ 952.095629] env[62974]: _type = "Task" [ 952.095629] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.108157] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654774, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.235583] env[62974]: DEBUG oslo_concurrency.lockutils [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.235583] env[62974]: DEBUG oslo_concurrency.lockutils [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquired lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.235583] env[62974]: DEBUG nova.network.neutron [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.238566] env[62974]: DEBUG nova.network.neutron [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 952.327594] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.389150] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c59d6e-f3c1-41fb-bc28-3679dfafa252 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.398499] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7e1813-5177-45a8-92d0-30f8f8abac5a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.450158] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286d4780-1431-4db9-81c4-cf81427fa53d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.461757] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d65bc13-d8d2-4832-bad4-d15e223eec85 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.466010] env[62974]: DEBUG oslo_vmware.api [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654773, 'name': PowerOnVM_Task, 'duration_secs': 0.531827} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.466312] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.466526] env[62974]: INFO nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Took 7.62 seconds to spawn the instance on the hypervisor. [ 952.466707] env[62974]: DEBUG nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.467942] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58e3100-883b-4591-945e-64ebdf158b3b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.482818] env[62974]: DEBUG nova.compute.provider_tree [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.513949] env[62974]: DEBUG nova.network.neutron [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Updating instance_info_cache with network_info: [{"id": "3f524d04-205f-4f35-a868-33609b02e1ac", "address": "fa:16:3e:96:d3:25", "network": {"id": "3931a238-2fe9-4be3-beb3-a4f6dd420c73", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1537523231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f24d5c2ca88c401c8ea9c0ba1ee445e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f524d04-20", "ovs_interfaceid": "3f524d04-205f-4f35-a868-33609b02e1ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.520363] env[62974]: DEBUG nova.compute.manager [req-bd60764a-0b2c-4beb-af89-0ab10dddb0ad req-0e9ff172-83d5-498d-b53f-756dfb7ff4a0 service nova] [instance: 
e23dbff7-d23e-4909-9b33-67ed15c325e7] Received event network-vif-deleted-1f4c134a-f095-4872-9ffc-8b90d02f29f9 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 952.520363] env[62974]: DEBUG nova.compute.manager [req-bd60764a-0b2c-4beb-af89-0ab10dddb0ad req-0e9ff172-83d5-498d-b53f-756dfb7ff4a0 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Received event network-vif-deleted-fd4e4478-3958-46b8-a54d-e6619377d377 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 952.520363] env[62974]: INFO nova.compute.manager [req-bd60764a-0b2c-4beb-af89-0ab10dddb0ad req-0e9ff172-83d5-498d-b53f-756dfb7ff4a0 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Neutron deleted interface fd4e4478-3958-46b8-a54d-e6619377d377; detaching it from the instance and deleting it from the info cache [ 952.520669] env[62974]: DEBUG nova.network.neutron [req-bd60764a-0b2c-4beb-af89-0ab10dddb0ad req-0e9ff172-83d5-498d-b53f-756dfb7ff4a0 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.606838] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654774, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067705} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.607254] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.608169] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f354640f-f357-4af1-b626-3c729de7df2f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.631957] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 92c80524-0fb6-4f28-9a72-bc4ab5793558/92c80524-0fb6-4f28-9a72-bc4ab5793558.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.632291] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4261c8f-93e7-41c3-b252-ff7947ce9cf7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.652132] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 952.652132] env[62974]: value = "task-2654775" [ 952.652132] env[62974]: _type = "Task" [ 952.652132] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.662028] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654775, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.746960] env[62974]: DEBUG nova.network.neutron [-] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.986229] env[62974]: DEBUG nova.scheduler.client.report [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.005565] env[62974]: INFO nova.compute.manager [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Took 30.39 seconds to build instance. 
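The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries around "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" above come from oslo.concurrency's lockutils, which serializes work on a single cached image while CopyVirtualDisk_Task clones it into the instance directory (ExtendVirtualDisk_Task and ReconfigVM_Task then grow and attach the copy). A minimal sketch of that locking pattern, assuming a hypothetical copy_disk callable in place of the actual CopyVirtualDisk call:

    from oslo_concurrency import lockutils

    def clone_cached_image(image_id, copy_disk, datastore='datastore1'):
        """Illustrative only: hold the per-image lock while copying the cached
        VMDK, as the lock/copy entries above do."""
        cache_vmdk = ('[%s] devstack-image-cache_base/%s/%s.vmdk'
                      % (datastore, image_id, image_id))
        # With its default do_log=True, lockutils.lock() emits the same
        # Acquiring/Acquired/Releasing DEBUG messages seen in this log;
        # external=False keeps the lock process-local.
        with lockutils.lock(cache_vmdk, external=False):
            copy_disk(cache_vmdk)

    # Example with a stubbed copy step:
    clone_cached_image('807f8582-499f-47ee-9d5b-755c9f39bc39',
                       lambda src: print('copying', src))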
[ 953.016635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Releasing lock "refresh_cache-aa6eb55e-79c0-4e1f-8756-05dff97b06d2" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.017144] env[62974]: DEBUG nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Instance network_info: |[{"id": "3f524d04-205f-4f35-a868-33609b02e1ac", "address": "fa:16:3e:96:d3:25", "network": {"id": "3931a238-2fe9-4be3-beb3-a4f6dd420c73", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1537523231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f24d5c2ca88c401c8ea9c0ba1ee445e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f524d04-20", "ovs_interfaceid": "3f524d04-205f-4f35-a868-33609b02e1ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 953.018278] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:d3:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f524d04-205f-4f35-a868-33609b02e1ac', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.029628] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Creating folder: Project (f24d5c2ca88c401c8ea9c0ba1ee445e7). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 953.031443] env[62974]: DEBUG nova.network.neutron [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Updating instance_info_cache with network_info: [{"id": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "address": "fa:16:3e:93:b0:61", "network": {"id": "3d204a06-2895-4d42-897a-0b36774f6e9a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-704885625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fca1731aab4207a0f333e5d6f630f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccb1f4c6-57", "ovs_interfaceid": "ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.033664] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b454002-2057-4395-932e-5e8021cb9b14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.040143] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb0db5af-7377-4d44-a4a4-2958aa236944 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.048250] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba9cb4f-c22a-4434-8661-f7fe039aedfe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.062900] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Created folder: Project (f24d5c2ca88c401c8ea9c0ba1ee445e7) in parent group-v535199. [ 953.063200] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Creating folder: Instances. Parent ref: group-v535453. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 953.063717] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bfcf503-31f1-4260-b71f-9aa7d0c481b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.073192] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Created folder: Instances in parent group-v535453. [ 953.073192] env[62974]: DEBUG oslo.service.loopingcall [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.073192] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.073464] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f596d746-4776-4879-94d6-bec9ee37b6aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.103450] env[62974]: DEBUG nova.compute.manager [req-bd60764a-0b2c-4beb-af89-0ab10dddb0ad req-0e9ff172-83d5-498d-b53f-756dfb7ff4a0 service nova] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Detach interface failed, port_id=fd4e4478-3958-46b8-a54d-e6619377d377, reason: Instance b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 953.108272] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.108272] env[62974]: value = "task-2654778" [ 953.108272] env[62974]: _type = "Task" [ 953.108272] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.116200] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654778, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.163615] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654775, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.251094] env[62974]: INFO nova.compute.manager [-] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Took 1.67 seconds to deallocate network for instance. 
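The "Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {...}" entries above report the compute node's placement inventory. Placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, while max_unit caps what a single instance may claim; a quick check against the logged figures (a sketch re-using the numbers from those entries):

    # Inventory as logged for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 120},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: capacity=%s, per-instance max=%s' % (rc, capacity, inv['max_unit']))
    # VCPU: capacity=192.0, MEMORY_MB: capacity=196078.0, DISK_GB: capacity=400.0

With allocation_ratio 4.0, the node advertises 192 schedulable VCPUs against 48 physical ones, which is why the scheduler keeps reporting the unchanged inventory while instances are built and destroyed around it.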
[ 953.507858] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f8cd47e1-05b2-4620-a3a1-3e60196c61ea tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "18489c02-5958-431f-aede-f554d0d785ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.904s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.538690] env[62974]: DEBUG oslo_concurrency.lockutils [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Releasing lock "refresh_cache-ef54d01a-5d2c-448a-a060-37520de396ca" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.618978] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654778, 'name': CreateVM_Task, 'duration_secs': 0.369487} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.620063] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 953.620820] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.621028] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.621426] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 953.621647] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88ea65b1-0b83-46fc-87ea-b123b7acd46f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.627136] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 953.627136] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f9426-fb45-8d9d-2796-334d9cb4b818" [ 953.627136] env[62974]: _type = "Task" [ 953.627136] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.635988] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f9426-fb45-8d9d-2796-334d9cb4b818, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.663740] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654775, 'name': ReconfigVM_Task, 'duration_secs': 0.698633} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.664169] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 92c80524-0fb6-4f28-9a72-bc4ab5793558/92c80524-0fb6-4f28-9a72-bc4ab5793558.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.665128] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df531b68-ac0c-4932-8d7b-327be3c8624c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.674534] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 953.674534] env[62974]: value = "task-2654779" [ 953.674534] env[62974]: _type = "Task" [ 953.674534] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.685705] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654779, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.760447] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.001906] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.521s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.005106] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.511s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.005399] env[62974]: DEBUG nova.objects.instance [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'resources' on Instance uuid cc7c25b5-1463-4eab-8d8f-f812d4f16c34 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.043144] env[62974]: DEBUG nova.compute.manager [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 954.044084] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31a8280-52a0-491e-afc7-4a955202e11a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.140478] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f9426-fb45-8d9d-2796-334d9cb4b818, 'name': SearchDatastore_Task, 'duration_secs': 0.010042} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.140759] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.140993] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.141264] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.141404] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.141581] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.141832] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9c3a4a0-59da-4261-a95c-38db6e462bd1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.152012] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.152240] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.153046] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00930e85-bb07-4095-ae4d-54270a089a29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.159100] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 954.159100] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522420b2-a79a-aeea-dde2-a2e6a58632f8" [ 954.159100] env[62974]: _type = "Task" [ 954.159100] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.168278] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522420b2-a79a-aeea-dde2-a2e6a58632f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.184514] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654779, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.553683] env[62974]: DEBUG nova.compute.manager [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received event network-changed-7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 954.553886] env[62974]: DEBUG nova.compute.manager [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Refreshing instance network info cache due to event network-changed-7a680703-498d-42ed-9269-736752f5f38e. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 954.554138] env[62974]: DEBUG oslo_concurrency.lockutils [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] Acquiring lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.554257] env[62974]: DEBUG oslo_concurrency.lockutils [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] Acquired lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.554450] env[62974]: DEBUG nova.network.neutron [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Refreshing network info cache for port 7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.600871] env[62974]: INFO nova.scheduler.client.report [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted allocation for migration 72f105a4-6bb5-4b6d-9659-0904cb1114d8 [ 954.672547] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522420b2-a79a-aeea-dde2-a2e6a58632f8, 'name': SearchDatastore_Task, 'duration_secs': 0.014224} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.673426] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1815f864-199f-4031-81e6-563e738a4dc1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.682473] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 954.682473] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528cba6f-981e-e852-1229-272255468e5e" [ 954.682473] env[62974]: _type = "Task" [ 954.682473] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.689941] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654779, 'name': Rename_Task, 'duration_secs': 0.97919} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.690873] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.691162] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-348e02fe-560c-4409-9e84-6dcc7ad7eec7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.696407] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528cba6f-981e-e852-1229-272255468e5e, 'name': SearchDatastore_Task, 'duration_secs': 0.010436} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.699169] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.699513] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] aa6eb55e-79c0-4e1f-8756-05dff97b06d2/aa6eb55e-79c0-4e1f-8756-05dff97b06d2.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 954.699950] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90967e38-62bd-4624-8ec4-c089867c395c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.704891] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 954.704891] env[62974]: value = "task-2654780" [ 954.704891] env[62974]: _type = "Task" [ 954.704891] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.709470] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 954.709470] env[62974]: value = "task-2654781" [ 954.709470] env[62974]: _type = "Task" [ 954.709470] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.719385] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654780, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.726317] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.838012] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a4f98b-2b40-461b-87ff-f29beac87a2f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.846704] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fc5bf5-09e7-4e0b-8bba-9d2ab863d918 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.881167] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e44b0c-49f7-423f-851f-de587cb553d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.898307] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c125fa-135b-4c33-9552-560fc17fcab7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.922442] env[62974]: DEBUG nova.compute.provider_tree [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.068720] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f2b74a-5ac0-49e9-9b59-31b832f4b9b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.079474] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Doing hard reboot of VM {{(pid=62974) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 955.079913] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-3eb49e93-57f0-47b2-9b8a-0ffa403b7ad6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.089109] env[62974]: DEBUG oslo_vmware.api [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 955.089109] env[62974]: value = "task-2654782" [ 955.089109] env[62974]: _type = "Task" [ 955.089109] 
env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.101699] env[62974]: DEBUG oslo_vmware.api [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654782, 'name': ResetVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.106133] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0e2bf23-4c70-466b-8bd6-89bf106c77f8 tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 20.693s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.217083] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654780, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.225821] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486335} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.229328] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] aa6eb55e-79c0-4e1f-8756-05dff97b06d2/aa6eb55e-79c0-4e1f-8756-05dff97b06d2.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 955.229565] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 955.229819] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83455889-1c5b-4eaf-b228-c7ec17a289fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.238564] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 955.238564] env[62974]: value = "task-2654783" [ 955.238564] env[62974]: _type = "Task" [ 955.238564] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.248403] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654783, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.318081] env[62974]: DEBUG nova.network.neutron [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updated VIF entry in instance network info cache for port 7a680703-498d-42ed-9269-736752f5f38e. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.318475] env[62974]: DEBUG nova.network.neutron [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.426169] env[62974]: DEBUG nova.scheduler.client.report [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 955.601223] env[62974]: DEBUG oslo_vmware.api [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654782, 'name': ResetVM_Task, 'duration_secs': 0.109803} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.601537] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Did hard reboot of VM {{(pid=62974) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 955.601781] env[62974]: DEBUG nova.compute.manager [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.602611] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b84cfd-2561-41d6-a838-6235c554a75f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.716924] env[62974]: DEBUG oslo_vmware.api [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654780, 'name': PowerOnVM_Task, 'duration_secs': 0.778988} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.717330] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.717617] env[62974]: INFO nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Took 8.46 seconds to spawn the instance on the hypervisor. [ 955.717894] env[62974]: DEBUG nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.721048] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9e52ed-7866-48f3-ad63-e293dacf0774 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.747752] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654783, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071999} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.748027] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 955.748796] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec2f99b-99ba-46b8-91f5-0491829437cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.771648] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] aa6eb55e-79c0-4e1f-8756-05dff97b06d2/aa6eb55e-79c0-4e1f-8756-05dff97b06d2.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 955.772520] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f40a1431-1636-41f0-aabd-b148e31e8321 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.793062] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 955.793062] env[62974]: value = "task-2654784" [ 955.793062] env[62974]: _type = "Task" [ 955.793062] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.801680] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654784, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.820877] env[62974]: DEBUG oslo_concurrency.lockutils [req-43d8f35d-5ae1-4583-9abf-d55a6a80fd84 req-4b2c42ef-ffb5-4cc2-b820-81417109e9e4 service nova] Releasing lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.875798] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "e11408df-466c-4101-b0cc-3621cda78a45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.876155] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.876409] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "e11408df-466c-4101-b0cc-3621cda78a45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.876606] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.876779] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.879070] env[62974]: INFO nova.compute.manager [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Terminating instance [ 955.915096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.915458] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.936132] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.937836] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.983s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.939367] env[62974]: INFO nova.compute.claims [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.974471] env[62974]: INFO nova.scheduler.client.report [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted allocations for instance cc7c25b5-1463-4eab-8d8f-f812d4f16c34 [ 956.123624] env[62974]: DEBUG oslo_concurrency.lockutils [None req-78d53438-7c26-4c27-83c8-c4f14a32c5b0 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.418s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.238407] env[62974]: INFO nova.compute.manager [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Took 31.87 seconds to build instance. [ 956.306521] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654784, 'name': ReconfigVM_Task, 'duration_secs': 0.304933} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.306828] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Reconfigured VM instance instance-00000059 to attach disk [datastore2] aa6eb55e-79c0-4e1f-8756-05dff97b06d2/aa6eb55e-79c0-4e1f-8756-05dff97b06d2.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.307470] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22114e3d-a83e-4cd8-9afd-86aadc15150e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.311134] env[62974]: INFO nova.compute.manager [None req-05fb3778-86c1-4f88-b9c0-08c668090a75 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Get console output [ 956.311457] env[62974]: WARNING nova.virt.vmwareapi.driver [None req-05fb3778-86c1-4f88-b9c0-08c668090a75 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] The console log is missing. Check your VSPC configuration [ 956.318913] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 956.318913] env[62974]: value = "task-2654785" [ 956.318913] env[62974]: _type = "Task" [ 956.318913] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.328440] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654785, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.384919] env[62974]: DEBUG nova.compute.manager [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 956.385091] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.386126] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55b6ac8-bd98-4571-854a-57e44ef5072e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.394968] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.395245] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-866ead92-dd78-42a9-bc1e-fe9b472389e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.402703] env[62974]: DEBUG oslo_vmware.api [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 956.402703] env[62974]: value = "task-2654786" [ 956.402703] env[62974]: _type = "Task" [ 956.402703] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.412830] env[62974]: DEBUG oslo_vmware.api [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654786, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.418956] env[62974]: INFO nova.compute.manager [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Detaching volume 13787642-ed9f-449c-b672-b1b3b50942b0 [ 956.459026] env[62974]: INFO nova.virt.block_device [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Attempting to driver detach volume 13787642-ed9f-449c-b672-b1b3b50942b0 from mountpoint /dev/sdb [ 956.459318] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Volume detach. 
Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 956.459532] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 956.460792] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6489e3eb-de2e-45d0-a2fb-7dcb44f71fe0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.487736] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95271ff5-b897-44a9-bc51-915b7edca1c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.491435] env[62974]: DEBUG oslo_concurrency.lockutils [None req-71db062a-a178-468d-8143-aa9562da5ac1 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "cc7c25b5-1463-4eab-8d8f-f812d4f16c34" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.890s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.498760] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6976783b-478e-4ba0-93f2-d8d86ccbe5b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.521066] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ff01da-4541-475b-ba46-371c391717ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.539576] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] The volume has not been displaced from its original location: [datastore1] volume-13787642-ed9f-449c-b672-b1b3b50942b0/volume-13787642-ed9f-449c-b672-b1b3b50942b0.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 956.545107] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfiguring VM instance instance-0000004a to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 956.545499] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef3a329a-1b47-4f78-8cce-910ea31b34a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.565822] env[62974]: DEBUG oslo_vmware.api [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 956.565822] env[62974]: value = "task-2654787" [ 956.565822] env[62974]: _type = "Task" [ 956.565822] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.575234] env[62974]: DEBUG oslo_vmware.api [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654787, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.609917] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "c79afcfb-25ce-4130-96d5-5148d968e5bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.610301] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.741045] env[62974]: DEBUG oslo_concurrency.lockutils [None req-706a0df2-9ea8-4b10-af99-237449740da6 tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "92c80524-0fb6-4f28-9a72-bc4ab5793558" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.411s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.830623] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654785, 'name': Rename_Task, 'duration_secs': 0.165271} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.831136] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.831318] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b934851-5236-4ec4-88b1-42633d5bdb28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.841696] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 956.841696] env[62974]: value = "task-2654788" [ 956.841696] env[62974]: _type = "Task" [ 956.841696] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.845535] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "ef54d01a-5d2c-448a-a060-37520de396ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.846169] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.846169] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "ef54d01a-5d2c-448a-a060-37520de396ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.846317] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.846855] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.848714] 
env[62974]: INFO nova.compute.manager [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Terminating instance [ 956.855035] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.915332] env[62974]: DEBUG oslo_vmware.api [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654786, 'name': PowerOffVM_Task, 'duration_secs': 0.249262} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.915976] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.916109] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.916393] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-686e9d8f-5560-46c6-8240-32ba27b37c3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.994659] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.994886] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.995119] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleting the datastore file [datastore1] e11408df-466c-4101-b0cc-3621cda78a45 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.995360] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c60fda2f-86fa-470c-a8aa-fed30e47140e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.011049] env[62974]: DEBUG oslo_vmware.api [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 
tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 957.011049] env[62974]: value = "task-2654790" [ 957.011049] env[62974]: _type = "Task" [ 957.011049] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.025754] env[62974]: DEBUG oslo_vmware.api [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654790, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.080968] env[62974]: DEBUG oslo_vmware.api [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654787, 'name': ReconfigVM_Task, 'duration_secs': 0.317507} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.081393] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Reconfigured VM instance instance-0000004a to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 957.086849] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d13b6844-76e2-44e6-b280-ad791a1270e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.109797] env[62974]: DEBUG oslo_vmware.api [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 957.109797] env[62974]: value = "task-2654791" [ 957.109797] env[62974]: _type = "Task" [ 957.109797] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.117842] env[62974]: DEBUG nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 957.131379] env[62974]: DEBUG oslo_vmware.api [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654791, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.310977] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb72064-412a-4515-a24b-fc60cd738da9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.324439] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3095e8f7-27a3-40a6-ab8d-a88930e618b0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.359597] env[62974]: DEBUG nova.compute.manager [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 957.359766] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.363664] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bda479-a0f9-4dba-a8f7-753c5aad5b77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.367883] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88f0544-afa1-4745-b32f-3d40076435e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.382149] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e712011-ad6a-49f2-8893-02925558cb89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.385711] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.385858] env[62974]: DEBUG oslo_vmware.api [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654788, 'name': PowerOnVM_Task, 'duration_secs': 0.513274} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.386078] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8cebb6c5-3422-4153-9bfa-a376d898ed60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.387572] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.388015] env[62974]: INFO nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Took 7.75 seconds to spawn the instance on the hypervisor. [ 957.388015] env[62974]: DEBUG nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.389177] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6db2ca-2d74-4a04-8fb9-78aaeb14c403 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.400460] env[62974]: DEBUG nova.compute.provider_tree [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.405175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "69fb00b3-6a41-4ef5-8876-6548cae31c07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.405175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.405175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "69fb00b3-6a41-4ef5-8876-6548cae31c07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.405175] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.405344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.412415] env[62974]: INFO nova.compute.manager [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Terminating instance [ 957.414625] env[62974]: DEBUG oslo_vmware.api [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 957.414625] env[62974]: value = "task-2654792" [ 957.414625] env[62974]: _type = "Task" [ 957.414625] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.428048] env[62974]: DEBUG oslo_vmware.api [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.522886] env[62974]: DEBUG oslo_vmware.api [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177688} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.523368] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.523723] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.524044] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.524363] env[62974]: INFO nova.compute.manager [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Took 1.14 seconds to destroy the instance on the hypervisor. [ 957.524728] env[62974]: DEBUG oslo.service.loopingcall [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.525062] env[62974]: DEBUG nova.compute.manager [-] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 957.525281] env[62974]: DEBUG nova.network.neutron [-] [instance: e11408df-466c-4101-b0cc-3621cda78a45] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 957.621828] env[62974]: DEBUG oslo_vmware.api [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654791, 'name': ReconfigVM_Task, 'duration_secs': 0.241708} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.625375] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535427', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'name': 'volume-13787642-ed9f-449c-b672-b1b3b50942b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3df97cea-5a6e-4d7a-b2f3-e02213816e24', 'attached_at': '', 'detached_at': '', 'volume_id': '13787642-ed9f-449c-b672-b1b3b50942b0', 'serial': '13787642-ed9f-449c-b672-b1b3b50942b0'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 957.650037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.785067] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "92c80524-0fb6-4f28-9a72-bc4ab5793558" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.785067] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "92c80524-0fb6-4f28-9a72-bc4ab5793558" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.785248] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "92c80524-0fb6-4f28-9a72-bc4ab5793558-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.785297] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "92c80524-0fb6-4f28-9a72-bc4ab5793558-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.785747] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock 
"92c80524-0fb6-4f28-9a72-bc4ab5793558-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.791683] env[62974]: INFO nova.compute.manager [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Terminating instance [ 957.904115] env[62974]: DEBUG nova.scheduler.client.report [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.914069] env[62974]: DEBUG nova.compute.manager [req-1203e903-2446-4c85-86be-b87d80968eb5 req-b9214ab5-e86a-488a-b617-1972089fb9bc service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Received event network-vif-deleted-39690695-af5c-4491-9d0f-b5ea691ce54f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 957.914565] env[62974]: INFO nova.compute.manager [req-1203e903-2446-4c85-86be-b87d80968eb5 req-b9214ab5-e86a-488a-b617-1972089fb9bc service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Neutron deleted interface 39690695-af5c-4491-9d0f-b5ea691ce54f; detaching it from the instance and deleting it from the info cache [ 957.914565] env[62974]: DEBUG nova.network.neutron [req-1203e903-2446-4c85-86be-b87d80968eb5 req-b9214ab5-e86a-488a-b617-1972089fb9bc service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.927134] env[62974]: DEBUG nova.compute.manager [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 957.930264] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.933967] env[62974]: INFO nova.compute.manager [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Took 26.97 seconds to build instance. 
[ 957.938570] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe96d33-71eb-4388-810a-08f0a53865f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.949238] env[62974]: DEBUG oslo_vmware.api [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654792, 'name': PowerOffVM_Task, 'duration_secs': 0.388702} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.952302] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.952302] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.952562] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.953271] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d0d9014-52a9-4269-ac45-d7465ec440a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.954504] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9efdd8b5-6692-40e7-89b2-aef9f77f2f8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.963906] env[62974]: DEBUG oslo_vmware.api [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 957.963906] env[62974]: value = "task-2654794" [ 957.963906] env[62974]: _type = "Task" [ 957.963906] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.973886] env[62974]: DEBUG oslo_vmware.api [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654794, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.186936] env[62974]: DEBUG nova.objects.instance [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'flavor' on Instance uuid 3df97cea-5a6e-4d7a-b2f3-e02213816e24 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.301020] env[62974]: DEBUG nova.compute.manager [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 958.301020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 958.301020] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205cf967-14a3-458b-a87e-6e8726e86e51 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.309081] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 958.309554] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ae7aaad-086d-4ac1-8f97-1b72b0863d16 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.320199] env[62974]: DEBUG oslo_vmware.api [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 958.320199] env[62974]: value = "task-2654795" [ 958.320199] env[62974]: _type = "Task" [ 958.320199] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.331488] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.331560] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.334122] env[62974]: DEBUG oslo_vmware.api [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.376403] env[62974]: DEBUG nova.network.neutron [-] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.411928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.411928] env[62974]: DEBUG nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 958.414216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.394s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.417224] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d41f343e-e52b-4ad9-aab4-a486eaafdb5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.430344] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee29d345-4d57-460a-b421-510921c61c68 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.442491] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fad113eb-1ebe-40b2-bcf2-34c1c31059c5 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.492s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.468722] env[62974]: DEBUG nova.compute.manager [req-1203e903-2446-4c85-86be-b87d80968eb5 req-b9214ab5-e86a-488a-b617-1972089fb9bc service nova] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Detach interface failed, port_id=39690695-af5c-4491-9d0f-b5ea691ce54f, reason: Instance e11408df-466c-4101-b0cc-3621cda78a45 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 958.478709] env[62974]: DEBUG oslo_vmware.api [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654794, 'name': PowerOffVM_Task, 'duration_secs': 0.23115} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.478989] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.479180] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.479469] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1158c8eb-35d7-487b-b3fe-49158f6d760a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.828964] env[62974]: DEBUG oslo_vmware.api [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654795, 'name': PowerOffVM_Task, 'duration_secs': 0.421932} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.829295] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.829509] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.829827] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6a4523d-62fa-4d61-b899-7e8767b999f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.835066] env[62974]: DEBUG nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 958.878764] env[62974]: INFO nova.compute.manager [-] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Took 1.35 seconds to deallocate network for instance. 
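The 'Acquiring lock "..." by "..."' / 'acquired ... waited 0.000s' / '"released" ... held N.NNNs' triples scattered through the entries above come from oslo.concurrency's lock wrapper ("inner" in lockutils.py); the "by" clause names the decorated callable. A rough illustration of the two usual forms follows, with placeholder lock names; it is illustrative, not code taken from this log.

from oslo_concurrency import lockutils

# Decorator form: the wrapper logs acquisition, wait time and hold time
# around each call, which is the pattern seen in the lockutils entries above.
@lockutils.synchronized('compute_resources')        # placeholder lock name
def instance_claim():
    pass  # critical section

# Context-manager form for ad-hoc critical sections (per-instance UUID locks
# in the log follow this shape; the name here is a placeholder).
with lockutils.lock('example-instance-uuid-events'):
    pass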
[ 958.917877] env[62974]: DEBUG nova.compute.utils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 958.923213] env[62974]: INFO nova.compute.claims [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.928414] env[62974]: DEBUG nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 958.928718] env[62974]: DEBUG nova.network.neutron [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 958.949649] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.949882] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.950076] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Deleting the datastore file [datastore1] ef54d01a-5d2c-448a-a060-37520de396ca {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.951174] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b936c16-f643-4bd8-93cf-feffc4875837 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.959073] env[62974]: DEBUG oslo_vmware.api [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for the task: (returnval){ [ 958.959073] env[62974]: value = "task-2654798" [ 958.959073] env[62974]: _type = "Task" [ 958.959073] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.972262] env[62974]: DEBUG oslo_vmware.api [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654798, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.973225] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.973432] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.973615] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleting the datastore file [datastore2] 69fb00b3-6a41-4ef5-8876-6548cae31c07 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.974231] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d3a3fc9-28e1-49f6-a373-6d97f0341f8a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.982349] env[62974]: DEBUG oslo_vmware.api [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 958.982349] env[62974]: value = "task-2654799" [ 958.982349] env[62974]: _type = "Task" [ 958.982349] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.988884] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.989187] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.989545] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Deleting the datastore file [datastore1] 92c80524-0fb6-4f28-9a72-bc4ab5793558 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.993019] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d911a7e-ff4e-4380-bdcb-a498bc74f17e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.995345] env[62974]: DEBUG oslo_vmware.api [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.004260] env[62974]: DEBUG oslo_vmware.api [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for the task: (returnval){ [ 959.004260] env[62974]: value = "task-2654800" [ 959.004260] env[62974]: _type = "Task" [ 959.004260] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.006331] env[62974]: DEBUG nova.policy [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a642fe375c743b7958ddeb1490a8032', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e0a57dfe83843708e333b70e0cc2bc4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 959.016105] env[62974]: DEBUG oslo_vmware.api [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654800, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.100565] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.100853] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.101087] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.101282] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.101450] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.103606] env[62974]: INFO nova.compute.manager [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Terminating instance [ 959.199666] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f909ff08-69ba-46a4-9985-a313404501df tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.284s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.355014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.355545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.355545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.355678] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.356145] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.358271] env[62974]: INFO nova.compute.manager [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Terminating instance [ 959.362704] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.389529] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.430178] env[62974]: INFO nova.compute.resource_tracker [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating resource usage from migration f6781122-3622-4249-8545-448431a998f3 [ 959.433104] env[62974]: DEBUG nova.compute.manager [None 
req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 959.458432] env[62974]: DEBUG nova.network.neutron [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Successfully created port: 920a4859-7d7c-4b5f-bc72-e4c088c41523 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.473293] env[62974]: DEBUG oslo_vmware.api [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Task: {'id': task-2654798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21172} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.473551] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.473732] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 959.473904] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 959.474087] env[62974]: INFO nova.compute.manager [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Took 2.11 seconds to destroy the instance on the hypervisor. [ 959.474327] env[62974]: DEBUG oslo.service.loopingcall [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.474519] env[62974]: DEBUG nova.compute.manager [-] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 959.474614] env[62974]: DEBUG nova.network.neutron [-] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 959.494784] env[62974]: DEBUG oslo_vmware.api [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203798} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.497684] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.497878] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 959.498079] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 959.498256] env[62974]: INFO nova.compute.manager [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Took 1.57 seconds to destroy the instance on the hypervisor. [ 959.498497] env[62974]: DEBUG oslo.service.loopingcall [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.499110] env[62974]: DEBUG nova.compute.manager [-] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 959.499218] env[62974]: DEBUG nova.network.neutron [-] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 959.516485] env[62974]: DEBUG oslo_vmware.api [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Task: {'id': task-2654800, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218815} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.516485] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.516485] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 959.516485] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 959.516485] env[62974]: INFO nova.compute.manager [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Took 1.22 seconds to destroy the instance on the hypervisor. [ 959.516885] env[62974]: DEBUG oslo.service.loopingcall [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.516885] env[62974]: DEBUG nova.compute.manager [-] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 959.516885] env[62974]: DEBUG nova.network.neutron [-] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 959.610019] env[62974]: DEBUG nova.compute.manager [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.610019] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.610019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f52bc0f-d724-40bc-b21e-c11b7df93f09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.617352] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.620636] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac650edc-cfed-46ad-a2e2-ea1648c5b33b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.636020] env[62974]: DEBUG oslo_vmware.api [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 959.636020] env[62974]: value = "task-2654801" [ 959.636020] env[62974]: _type = "Task" [ 959.636020] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.651452] env[62974]: DEBUG oslo_vmware.api [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654801, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.777234] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57115d55-795e-4da0-a9b0-49bc3ed14688 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.786918] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3533ad0-3c1d-444a-9421-d77580ee2f28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.831926] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc6c40f-fd73-4fe1-8b85-f412889facab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.842871] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b666cc-571c-4835-ad79-d3bc51855190 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.862447] env[62974]: DEBUG nova.compute.provider_tree [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.863957] env[62974]: DEBUG nova.compute.manager [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.864262] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.865179] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7121276c-5ecf-4ee2-adbb-ae6db971711c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.873821] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.876071] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96551754-4f76-4c5c-ad82-eeb95ffe1298 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.884453] env[62974]: DEBUG oslo_vmware.api [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 959.884453] env[62974]: value = "task-2654802" [ 959.884453] env[62974]: _type = "Task" [ 959.884453] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.895363] env[62974]: DEBUG oslo_vmware.api [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.144757] env[62974]: DEBUG oslo_vmware.api [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654801, 'name': PowerOffVM_Task, 'duration_secs': 0.196074} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.145055] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.145259] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.145498] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-276fdc4c-959d-4ab2-a132-8dec50f6786a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.225657] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.226046] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.226524] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Deleting the datastore file [datastore2] aa6eb55e-79c0-4e1f-8756-05dff97b06d2 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.229760] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbe2ef9a-50eb-4ff0-9004-d27c6b60ce89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.241076] env[62974]: DEBUG oslo_vmware.api [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for the task: (returnval){ [ 960.241076] env[62974]: value = "task-2654804" [ 960.241076] env[62974]: _type = "Task" [ 960.241076] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.256794] env[62974]: DEBUG oslo_vmware.api [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654804, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.263731] env[62974]: DEBUG nova.compute.manager [req-9c4f74ea-239d-43df-9179-31ee95488ebe req-12cb623b-9c4c-4c82-9d4f-c4b5102a1f7d service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Received event network-vif-deleted-bb305260-1683-4681-aea1-92b24514bf2b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 960.264069] env[62974]: INFO nova.compute.manager [req-9c4f74ea-239d-43df-9179-31ee95488ebe req-12cb623b-9c4c-4c82-9d4f-c4b5102a1f7d service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Neutron deleted interface bb305260-1683-4681-aea1-92b24514bf2b; detaching it from the instance and deleting it from the info cache [ 960.264149] env[62974]: DEBUG nova.network.neutron [req-9c4f74ea-239d-43df-9179-31ee95488ebe req-12cb623b-9c4c-4c82-9d4f-c4b5102a1f7d service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.366542] env[62974]: DEBUG nova.scheduler.client.report [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.401087] env[62974]: DEBUG oslo_vmware.api [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654802, 'name': PowerOffVM_Task, 'duration_secs': 0.220868} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.401642] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.401811] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.402143] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78e9d802-e125-4fba-b4c4-c698532bbbff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.452014] env[62974]: DEBUG nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 960.485844] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.485844] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.485844] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleting the datastore file [datastore1] 3df97cea-5a6e-4d7a-b2f3-e02213816e24 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.486413] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-785e0f8c-76da-4f11-a14b-382cb5504775 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.500373] env[62974]: DEBUG oslo_vmware.api [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 960.500373] env[62974]: value = "task-2654806" [ 960.500373] env[62974]: _type = "Task" [ 960.500373] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.504441] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 960.504722] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 960.504882] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 960.505097] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 960.505259] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 960.505418] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 960.505630] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 960.505802] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 960.505981] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 960.506167] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 960.506341] env[62974]: DEBUG nova.virt.hardware [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 960.507813] env[62974]: DEBUG nova.compute.manager [req-833daea2-8492-466a-a022-b6c8321f46ba req-6a25313a-719c-4bc3-a6a9-8bef86e3c9b2 service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Received event network-vif-deleted-ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 960.507960] env[62974]: INFO nova.compute.manager [req-833daea2-8492-466a-a022-b6c8321f46ba req-6a25313a-719c-4bc3-a6a9-8bef86e3c9b2 service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Neutron deleted interface ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0; detaching it from the instance and deleting it from the info cache [ 960.508086] env[62974]: DEBUG nova.network.neutron [req-833daea2-8492-466a-a022-b6c8321f46ba req-6a25313a-719c-4bc3-a6a9-8bef86e3c9b2 service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.510358] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50c6e5e-4527-4a24-8583-5ef30a2725f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.526768] env[62974]: DEBUG oslo_vmware.api [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654806, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.531317] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5469b7-0eb2-46f7-a5e0-fdc9d3a4fe47 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.732877] env[62974]: DEBUG nova.network.neutron [-] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.752726] env[62974]: DEBUG oslo_vmware.api [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Task: {'id': task-2654804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187671} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.752851] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.752981] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.753803] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.753803] env[62974]: INFO nova.compute.manager [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Took 1.15 seconds to destroy the instance on the hypervisor. [ 960.753803] env[62974]: DEBUG oslo.service.loopingcall [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.754227] env[62974]: DEBUG nova.compute.manager [-] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 960.754227] env[62974]: DEBUG nova.network.neutron [-] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.767282] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62bfa7d6-5d72-45f2-963c-98f454467ce4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.781345] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36961d19-ddfd-4d77-8296-448059942dbd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.819521] env[62974]: DEBUG nova.compute.manager [req-9c4f74ea-239d-43df-9179-31ee95488ebe req-12cb623b-9c4c-4c82-9d4f-c4b5102a1f7d service nova] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Detach interface failed, port_id=bb305260-1683-4681-aea1-92b24514bf2b, reason: Instance 69fb00b3-6a41-4ef5-8876-6548cae31c07 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 960.878862] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.459s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.878862] env[62974]: INFO nova.compute.manager [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Migrating [ 960.879964] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.553s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.880332] env[62974]: DEBUG nova.objects.instance [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lazy-loading 'resources' on Instance uuid e23dbff7-d23e-4909-9b33-67ed15c325e7 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.954320] env[62974]: DEBUG nova.network.neutron [-] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.992263] env[62974]: DEBUG nova.network.neutron [-] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.015312] env[62974]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79e9a95e-642e-4958-bbd0-b00f45c9c64f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.020688] env[62974]: DEBUG oslo_vmware.api [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188157} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.027309] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.027309] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.027309] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.027309] env[62974]: INFO nova.compute.manager [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Took 1.16 seconds to destroy the instance on the hypervisor. [ 961.027309] env[62974]: DEBUG oslo.service.loopingcall [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.027501] env[62974]: DEBUG nova.compute.manager [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.027501] env[62974]: DEBUG nova.network.neutron [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.038021] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def0912f-2f93-469a-934c-8fb0b4158ff8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.076540] env[62974]: DEBUG nova.compute.manager [req-833daea2-8492-466a-a022-b6c8321f46ba req-6a25313a-719c-4bc3-a6a9-8bef86e3c9b2 service nova] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Detach interface failed, port_id=ccb1f4c6-57f2-4cf0-bf24-e0d65dea09e0, reason: Instance ef54d01a-5d2c-448a-a060-37520de396ca could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 961.235016] env[62974]: INFO nova.compute.manager [-] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Took 1.74 seconds to deallocate network for instance. [ 961.250452] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b91034-fb6f-4ac5-b6ea-d4d4de1c72a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.256933] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113ee028-91e8-443a-816c-6b8c0723cb70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.290976] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b05a432-631d-4ece-ad29-108585b5718e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.300254] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4e0c68-53ce-4141-895a-909bcc1c6279 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.316040] env[62974]: DEBUG nova.compute.provider_tree [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.398744] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.399581] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.400023] env[62974]: DEBUG nova.network.neutron [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.458171] env[62974]: INFO nova.compute.manager [-] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Took 1.98 seconds to deallocate network for instance. [ 961.496322] env[62974]: INFO nova.compute.manager [-] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Took 1.98 seconds to deallocate network for instance. 
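The "Acquiring lock ..." / "acquired ... :: waited Ns" / '"released" ... :: held Ns' messages interleaved in the records above are emitted by oslo.concurrency's named-lock helpers. A minimal sketch of that pattern follows; the function names and lock targets here are illustrative placeholders, not Nova's actual code paths:

    # Sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock ... / Lock ... acquired ... :: waited Ns / ... :: held Ns"
    # DEBUG lines in this trace. refresh_instance_cache/update_usage are
    # hypothetical names, not Nova functions.
    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid):
        # Context-manager form: the acquire/wait/held timings are logged
        # automatically at DEBUG level by lockutils.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # ... rebuild the instance's network info cache ...

    # Decorator form: serializes every caller on the same named lock.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # ... adjust tracked resource usage under the shared lock ...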
[ 961.535520] env[62974]: DEBUG nova.network.neutron [-] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.742449] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.765317] env[62974]: DEBUG nova.network.neutron [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Successfully updated port: 920a4859-7d7c-4b5f-bc72-e4c088c41523 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.819015] env[62974]: DEBUG nova.scheduler.client.report [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.967155] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.004129] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.032941] env[62974]: DEBUG nova.network.neutron [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.042209] env[62974]: INFO nova.compute.manager [-] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Took 1.29 seconds to deallocate network for instance. 
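The nova.scheduler.client.report record above logs the full placement inventory payload for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18. The sketch below restates that payload and the kind of "has it changed?" check that produces the "Inventory has not changed" message; it is a plain-Python illustration, not nova.compute.provider_tree's actual implementation:

    # Inventory dict copied from the log record above, keyed by resource class.
    CURRENT_INVENTORY = {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                      'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120,
                      'step_size': 1, 'allocation_ratio': 1.0},
    }

    def inventory_changed(tracked, reported):
        # An update is pushed to placement only when some resource class or
        # field differs; identical dicts yield the "Inventory has not changed"
        # message seen in this trace.
        return tracked != reported

    if not inventory_changed(CURRENT_INVENTORY, CURRENT_INVENTORY):
        print('Inventory has not changed for provider '
              'bd3bd9ae-180c-41cf-831e-3dd3892efa18')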
[ 962.123391] env[62974]: DEBUG nova.network.neutron [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.274076] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "refresh_cache-0f19241f-1650-41e5-8fe8-828024bf6aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.274076] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "refresh_cache-0f19241f-1650-41e5-8fe8-828024bf6aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.274076] env[62974]: DEBUG nova.network.neutron [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.325042] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.445s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.327899] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" 
:: waited 8.567s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.329732] env[62974]: DEBUG nova.objects.instance [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lazy-loading 'resources' on Instance uuid b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.352301] env[62974]: INFO nova.scheduler.client.report [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleted allocations for instance e23dbff7-d23e-4909-9b33-67ed15c325e7 [ 962.530287] env[62974]: DEBUG nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Received event network-vif-deleted-4bf87ffd-a962-46b0-8d3e-aee290745b3d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 962.530656] env[62974]: DEBUG nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Received event network-vif-deleted-3f524d04-205f-4f35-a868-33609b02e1ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 962.530880] env[62974]: DEBUG nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Received event network-vif-deleted-1461ee04-30d1-4afa-b41b-26e9ea0dc08f {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 962.531083] env[62974]: DEBUG nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Received event network-vif-plugged-920a4859-7d7c-4b5f-bc72-e4c088c41523 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 962.531282] env[62974]: DEBUG oslo_concurrency.lockutils [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] Acquiring lock "0f19241f-1650-41e5-8fe8-828024bf6aaa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.531491] env[62974]: DEBUG oslo_concurrency.lockutils [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.531786] env[62974]: DEBUG oslo_concurrency.lockutils [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.532053] env[62974]: DEBUG nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] 
[instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] No waiting events found dispatching network-vif-plugged-920a4859-7d7c-4b5f-bc72-e4c088c41523 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.532245] env[62974]: WARNING nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Received unexpected event network-vif-plugged-920a4859-7d7c-4b5f-bc72-e4c088c41523 for instance with vm_state building and task_state spawning. [ 962.532448] env[62974]: DEBUG nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Received event network-changed-920a4859-7d7c-4b5f-bc72-e4c088c41523 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 962.532635] env[62974]: DEBUG nova.compute.manager [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Refreshing instance network info cache due to event network-changed-920a4859-7d7c-4b5f-bc72-e4c088c41523. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 962.532815] env[62974]: DEBUG oslo_concurrency.lockutils [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] Acquiring lock "refresh_cache-0f19241f-1650-41e5-8fe8-828024bf6aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.535578] env[62974]: INFO nova.compute.manager [-] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Took 1.51 seconds to deallocate network for instance. [ 962.549880] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.625891] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.804989] env[62974]: DEBUG nova.network.neutron [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.836048] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquiring lock "7163e48f-8344-4837-bbfd-cbb5741eee5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.836332] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.860183] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fcfb64b8-b904-44a5-b5d2-c57fe91e708b tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "e23dbff7-d23e-4909-9b33-67ed15c325e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.229s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.940579] env[62974]: DEBUG nova.network.neutron [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Updating instance_info_cache with network_info: [{"id": "920a4859-7d7c-4b5f-bc72-e4c088c41523", "address": "fa:16:3e:3d:60:8b", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap920a4859-7d", "ovs_interfaceid": "920a4859-7d7c-4b5f-bc72-e4c088c41523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.041950] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.082535] env[62974]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c700080-b993-425e-b41e-1e9ac2d94cd5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.091324] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e4c728-f7da-466d-9f3e-9954dc7bab29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.122188] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6363f0f8-16a1-476f-8386-f5f79677bfb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.133868] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d5aed6-e3cb-4522-934f-d8e832be022f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.150134] env[62974]: DEBUG nova.compute.provider_tree [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.345309] env[62974]: DEBUG nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 963.441564] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "refresh_cache-0f19241f-1650-41e5-8fe8-828024bf6aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.441727] env[62974]: DEBUG nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Instance network_info: |[{"id": "920a4859-7d7c-4b5f-bc72-e4c088c41523", "address": "fa:16:3e:3d:60:8b", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap920a4859-7d", "ovs_interfaceid": "920a4859-7d7c-4b5f-bc72-e4c088c41523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 963.442177] env[62974]: DEBUG oslo_concurrency.lockutils [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] Acquired lock "refresh_cache-0f19241f-1650-41e5-8fe8-828024bf6aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.442177] env[62974]: DEBUG nova.network.neutron [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Refreshing network info cache for port 920a4859-7d7c-4b5f-bc72-e4c088c41523 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.443424] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:60:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '920a4859-7d7c-4b5f-bc72-e4c088c41523', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.451049] env[62974]: DEBUG oslo.service.loopingcall [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.452110] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 963.452308] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2655740e-98ba-4e57-8366-aa8b8d1c8422 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.473715] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.473715] env[62974]: value = "task-2654808" [ 963.473715] env[62974]: _type = "Task" [ 963.473715] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.482598] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654808, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.654158] env[62974]: DEBUG nova.scheduler.client.report [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 963.864625] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.983657] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654808, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.135288] env[62974]: DEBUG nova.network.neutron [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Updated VIF entry in instance network info cache for port 920a4859-7d7c-4b5f-bc72-e4c088c41523. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.135681] env[62974]: DEBUG nova.network.neutron [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Updating instance_info_cache with network_info: [{"id": "920a4859-7d7c-4b5f-bc72-e4c088c41523", "address": "fa:16:3e:3d:60:8b", "network": {"id": "bd9eb685-68be-405a-80b9-41b39e668da8", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-164558675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0a57dfe83843708e333b70e0cc2bc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap920a4859-7d", "ovs_interfaceid": "920a4859-7d7c-4b5f-bc72-e4c088c41523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.147312] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aaad2f5-f904-41e9-8d6c-ede762f90371 {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.166710] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.839s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.168690] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance '55229db9-9442-4973-a1f2-7762227167a4' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 964.172077] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.523s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.173431] env[62974]: INFO nova.compute.claims [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.191848] env[62974]: INFO nova.scheduler.client.report [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Deleted allocations for instance b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4 [ 964.483288] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654808, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.638870] env[62974]: DEBUG oslo_concurrency.lockutils [req-843fc501-cf65-4abe-b63e-60b806a5ae9c req-58cfa3f5-2936-49a6-ad3e-e74a0143ac58 service nova] Releasing lock "refresh_cache-0f19241f-1650-41e5-8fe8-828024bf6aaa" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.679036] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.680471] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b2b3794-3008-497d-b918-c5468b2f304f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.688217] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 964.688217] env[62974]: value = "task-2654809" [ 964.688217] env[62974]: _type = "Task" [ 964.688217] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.702638] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9635cd22-6601-45a0-abfd-1b392fc87f73 tempest-ImagesTestJSON-242512265 tempest-ImagesTestJSON-242512265-project-member] Lock "b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.791s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.703508] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 964.703704] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance '55229db9-9442-4973-a1f2-7762227167a4' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 964.984546] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654808, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.208924] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 965.209175] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.209359] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.209584] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.209782] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 
tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.209946] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 965.210231] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 965.210503] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 965.210592] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 965.210732] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 965.210904] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 965.216151] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ba08861-adf1-4de3-8157-64ad5e7cfb65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.236398] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 965.236398] env[62974]: value = "task-2654810" [ 965.236398] env[62974]: _type = "Task" [ 965.236398] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.246169] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654810, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.448753] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45df276b-79e7-4943-9817-90e77fa612e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.457179] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508b2470-34d2-434f-afa4-513b3757b8cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.493714] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2099eeb-d5d3-4bea-86c4-0ca97e52879d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.501376] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654808, 'name': CreateVM_Task, 'duration_secs': 1.757019} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.503724] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.504516] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.504780] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.505161] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.506391] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9848f500-193e-4742-81bb-fe253731155d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.510114] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83b090e6-ac02-4708-bdbc-83078293e27d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.514540] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 965.514540] env[62974]: value = 
"session[524ad32a-e631-7d4d-08da-2ec507e74f84]526fc381-5d72-e29e-c56e-fca273eeb4f6" [ 965.514540] env[62974]: _type = "Task" [ 965.514540] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.522317] env[62974]: DEBUG nova.compute.provider_tree [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.535587] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526fc381-5d72-e29e-c56e-fca273eeb4f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009841} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.535587] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.535587] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.535587] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.535818] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.535818] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.536388] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-882c6999-51bf-4d4a-af9a-71b2f02ec352 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.543648] env[62974]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.543873] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.544695] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-074f6bc1-0419-48a6-a2f2-5636c1808e36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.549436] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 965.549436] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a45b33-21e6-6204-40a8-1be8632c99ec" [ 965.549436] env[62974]: _type = "Task" [ 965.549436] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.556910] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a45b33-21e6-6204-40a8-1be8632c99ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.746266] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654810, 'name': ReconfigVM_Task, 'duration_secs': 0.137432} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.746718] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance '55229db9-9442-4973-a1f2-7762227167a4' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 966.031104] env[62974]: DEBUG nova.scheduler.client.report [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.060825] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a45b33-21e6-6204-40a8-1be8632c99ec, 'name': SearchDatastore_Task, 'duration_secs': 0.007746} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.061664] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f00d4a0-28b6-485d-85fa-3b43765c598e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.067990] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 966.067990] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d9c50f-3971-399d-9b17-323276cc9151" [ 966.067990] env[62974]: _type = "Task" [ 966.067990] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.077127] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d9c50f-3971-399d-9b17-323276cc9151, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.257498] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 966.257768] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 966.257956] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 966.258166] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 966.258360] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 966.258457] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 966.258657] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 966.258813] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 966.258973] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 
tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 966.260368] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 966.260882] env[62974]: DEBUG nova.virt.hardware [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 966.266486] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Reconfiguring VM instance instance-00000044 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 966.266969] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab61594e-4400-4495-9971-d14689fc6836 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.287135] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 966.287135] env[62974]: value = "task-2654811" [ 966.287135] env[62974]: _type = "Task" [ 966.287135] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.296496] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654811, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.538254] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.538752] env[62974]: DEBUG nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 966.541806] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.179s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.543111] env[62974]: INFO nova.compute.claims [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.584399] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d9c50f-3971-399d-9b17-323276cc9151, 'name': SearchDatastore_Task, 'duration_secs': 0.010924} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.585303] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.585569] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 0f19241f-1650-41e5-8fe8-828024bf6aaa/0f19241f-1650-41e5-8fe8-828024bf6aaa.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.585833] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce5bdb30-c415-4881-88b4-0613e7eea595 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.593036] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 966.593036] env[62974]: value = "task-2654812" [ 966.593036] env[62974]: _type = "Task" [ 966.593036] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.601134] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654812, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.800715] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654811, 'name': ReconfigVM_Task, 'duration_secs': 0.166426} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.801082] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Reconfigured VM instance instance-00000044 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 966.802069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68d41ee-3779-407a-bbbd-a1a92d7f774c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.826103] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 55229db9-9442-4973-a1f2-7762227167a4/55229db9-9442-4973-a1f2-7762227167a4.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.826569] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eaefdb54-2b7c-4ba7-a3a8-940e8d1fe2d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.846145] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 966.846145] env[62974]: value = "task-2654813" [ 966.846145] env[62974]: _type = "Task" [ 966.846145] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.856741] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654813, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.048230] env[62974]: DEBUG nova.compute.utils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 967.053276] env[62974]: DEBUG nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 967.054106] env[62974]: DEBUG nova.network.neutron [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 967.103842] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654812, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4594} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.104671] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 0f19241f-1650-41e5-8fe8-828024bf6aaa/0f19241f-1650-41e5-8fe8-828024bf6aaa.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.104671] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.104671] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7d67a70-cafa-4051-9645-501e938902cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.112376] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 967.112376] env[62974]: value = "task-2654814" [ 967.112376] env[62974]: _type = "Task" [ 967.112376] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.118312] env[62974]: DEBUG nova.policy [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84861fd0e88640529eb573045514dff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39e59f58f7c24529bfce4bcc18cc7925', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 967.126478] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654814, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.357172] env[62974]: DEBUG oslo_vmware.api [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654813, 'name': ReconfigVM_Task, 'duration_secs': 0.385688} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.357273] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 55229db9-9442-4973-a1f2-7762227167a4/55229db9-9442-4973-a1f2-7762227167a4.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.357489] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance '55229db9-9442-4973-a1f2-7762227167a4' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 967.394208] env[62974]: DEBUG nova.network.neutron [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Successfully created port: abdd2cbf-4f48-49bf-abd0-ab91996860e1 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 967.551064] env[62974]: DEBUG nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 967.626384] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062083} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.626798] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.627806] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0894d4-675c-46ab-b9b4-2969e0b8916a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.655319] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 0f19241f-1650-41e5-8fe8-828024bf6aaa/0f19241f-1650-41e5-8fe8-828024bf6aaa.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.659556] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40faee3f-ad11-4351-b0a4-d4f7e1c28657 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.683334] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 967.683334] env[62974]: value = "task-2654815" [ 967.683334] env[62974]: _type = "Task" [ 967.683334] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.694855] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654815, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.862052] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1667e2-39c4-43c1-8796-f600e99d9679 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.865274] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c810d1-4f96-42ff-8418-f01548e8dad7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.894231] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2562d5-d336-4991-8717-4e9f99d8e1b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.898812] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98959f8-118b-4bec-aa40-a2c13a203b45 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.948725] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance '55229db9-9442-4973-a1f2-7762227167a4' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 968.070508] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621972d4-53d1-4626-810a-42bc3445c506 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.070508] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a5de56-b04c-404e-8a73-d46656e4b4a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.070508] env[62974]: DEBUG nova.compute.provider_tree [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.070508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.070508] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.074209] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.074209] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.074209] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.074209] env[62974]: INFO nova.compute.manager [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Terminating instance [ 968.196191] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654815, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.491023] env[62974]: DEBUG nova.scheduler.client.report [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.531842] env[62974]: DEBUG nova.network.neutron [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Port 8c1e40ea-8afa-424a-9c2d-65f7e1179366 binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 968.535858] env[62974]: DEBUG nova.compute.manager [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.536064] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.536943] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8804412f-614d-4548-8839-4b8d68bb51f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.546441] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.547353] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33211f48-eb4d-428c-996e-c4721b768f8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.555268] env[62974]: DEBUG oslo_vmware.api [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 968.555268] env[62974]: value = "task-2654816" [ 968.555268] env[62974]: _type = "Task" [ 968.555268] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.569034] env[62974]: DEBUG nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 968.569217] env[62974]: DEBUG oslo_vmware.api [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654816, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.606660] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 968.606899] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.607072] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 968.607280] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.607570] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 968.607739] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 968.607856] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 968.608023] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 968.608476] env[62974]: 
DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 968.609422] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 968.609422] env[62974]: DEBUG nova.virt.hardware [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 968.609662] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b05b90-3b16-4cf2-874e-34170e4e5353 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.619895] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80dc0c0-d128-4611-ad26-4b509fe529a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.694931] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654815, 'name': ReconfigVM_Task, 'duration_secs': 0.569418} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.696320] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 0f19241f-1650-41e5-8fe8-828024bf6aaa/0f19241f-1650-41e5-8fe8-828024bf6aaa.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.696998] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed85fca0-bcae-44d9-b7a9-2e487ae83fc1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.704463] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 968.704463] env[62974]: value = "task-2654817" [ 968.704463] env[62974]: _type = "Task" [ 968.704463] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.714989] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654817, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.882375] env[62974]: DEBUG nova.compute.manager [req-1bd44f9d-beba-4a5a-8c18-ce69e5964bdd req-f906285d-7a7d-4433-bf49-35f14e599543 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Received event network-vif-plugged-abdd2cbf-4f48-49bf-abd0-ab91996860e1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 968.882375] env[62974]: DEBUG oslo_concurrency.lockutils [req-1bd44f9d-beba-4a5a-8c18-ce69e5964bdd req-f906285d-7a7d-4433-bf49-35f14e599543 service nova] Acquiring lock "c79afcfb-25ce-4130-96d5-5148d968e5bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.882375] env[62974]: DEBUG oslo_concurrency.lockutils [req-1bd44f9d-beba-4a5a-8c18-ce69e5964bdd req-f906285d-7a7d-4433-bf49-35f14e599543 service nova] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.882375] env[62974]: DEBUG oslo_concurrency.lockutils [req-1bd44f9d-beba-4a5a-8c18-ce69e5964bdd req-f906285d-7a7d-4433-bf49-35f14e599543 service nova] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.882375] env[62974]: DEBUG nova.compute.manager [req-1bd44f9d-beba-4a5a-8c18-ce69e5964bdd req-f906285d-7a7d-4433-bf49-35f14e599543 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] No waiting events found dispatching network-vif-plugged-abdd2cbf-4f48-49bf-abd0-ab91996860e1 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 968.883699] env[62974]: WARNING nova.compute.manager [req-1bd44f9d-beba-4a5a-8c18-ce69e5964bdd req-f906285d-7a7d-4433-bf49-35f14e599543 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Received unexpected event network-vif-plugged-abdd2cbf-4f48-49bf-abd0-ab91996860e1 for instance with vm_state building and task_state spawning. [ 968.999904] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.999904] env[62974]: DEBUG nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 969.001683] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.612s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.002516] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.007688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.264s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.007688] env[62974]: DEBUG nova.objects.instance [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lazy-loading 'resources' on Instance uuid 69fb00b3-6a41-4ef5-8876-6548cae31c07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.051725] env[62974]: INFO nova.scheduler.client.report [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted allocations for instance e11408df-466c-4101-b0cc-3621cda78a45 [ 969.064040] env[62974]: DEBUG nova.network.neutron [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Successfully updated port: abdd2cbf-4f48-49bf-abd0-ab91996860e1 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 969.070364] env[62974]: DEBUG oslo_vmware.api [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654816, 'name': PowerOffVM_Task, 'duration_secs': 0.228971} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.070778] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.070860] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.071154] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a530939f-b88e-42b1-ae4f-b3ebfadd3994 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.144124] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.144443] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.144736] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleting the datastore file [datastore2] c002aec9-4fdf-45c9-9ef6-d196c4891e19 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.146850] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33ed8d07-91a7-49cd-ab54-b4b64963134a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.154411] env[62974]: DEBUG oslo_vmware.api [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 969.154411] env[62974]: value = "task-2654819" [ 969.154411] env[62974]: _type = "Task" [ 969.154411] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.164129] env[62974]: DEBUG oslo_vmware.api [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654819, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.214922] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654817, 'name': Rename_Task, 'duration_secs': 0.17782} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.215214] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.215456] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98fbf83b-0df9-4a93-801d-7a76deda59b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.223011] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 969.223011] env[62974]: value = "task-2654820" [ 969.223011] env[62974]: _type = "Task" [ 969.223011] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.233374] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654820, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.508981] env[62974]: DEBUG nova.compute.utils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.514801] env[62974]: DEBUG nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 969.514981] env[62974]: DEBUG nova.network.neutron [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 969.566257] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a04f8ba-b171-4943-8bc3-1c563a622dfe tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "e11408df-466c-4101-b0cc-3621cda78a45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.690s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.567277] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "55229db9-9442-4973-a1f2-7762227167a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.567821] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.567821] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.568884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "refresh_cache-c79afcfb-25ce-4130-96d5-5148d968e5bd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.569026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "refresh_cache-c79afcfb-25ce-4130-96d5-5148d968e5bd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.569158] env[62974]: DEBUG nova.network.neutron [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.604523] env[62974]: DEBUG nova.policy [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d 
tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a86bbc98ec50467792b3c6a6cedc790b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14dd4a9a77ad40458d40bb82ac4b90a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 969.668632] env[62974]: DEBUG oslo_vmware.api [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654819, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251279} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.668887] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.669143] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.669361] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.669550] env[62974]: INFO nova.compute.manager [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Took 1.13 seconds to destroy the instance on the hypervisor. [ 969.669843] env[62974]: DEBUG oslo.service.loopingcall [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.671023] env[62974]: DEBUG nova.compute.manager [-] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.671023] env[62974]: DEBUG nova.network.neutron [-] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.740451] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654820, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.830236] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df3250d-c4c3-4233-beea-2b22383fd70b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.838880] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7a9bd6-f3ad-45ee-965e-6b00c38b4e5a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.875248] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425b7fae-342b-4378-b15b-dc91ce8cd8a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.886019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2264fa6-c1fa-47df-bdf7-602e809088da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.904098] env[62974]: DEBUG nova.compute.provider_tree [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.020392] env[62974]: DEBUG nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.086392] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "dca952df-dac9-4502-948b-24ac6fb939f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.086638] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "dca952df-dac9-4502-948b-24ac6fb939f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.237153] env[62974]: DEBUG oslo_vmware.api [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654820, 'name': PowerOnVM_Task, 'duration_secs': 0.604207} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.237502] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 970.237721] env[62974]: INFO nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Took 9.79 seconds to spawn the instance on the hypervisor. [ 970.238090] env[62974]: DEBUG nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 970.238773] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2d5b2d-b2aa-48a6-af58-15e98976003b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.243506] env[62974]: DEBUG nova.network.neutron [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.435644] env[62974]: ERROR nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [req-ee265cd5-571a-4ee4-b225-bb03c9b448a2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ee265cd5-571a-4ee4-b225-bb03c9b448a2"}]} [ 970.438800] env[62974]: DEBUG nova.network.neutron [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Successfully created port: cd9af61c-b640-4853-97d9-5989ce177f57 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.460299] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 970.477009] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 970.477269] env[62974]: DEBUG nova.compute.provider_tree [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.489608] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 
tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 970.505011] env[62974]: DEBUG nova.network.neutron [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Updating instance_info_cache with network_info: [{"id": "abdd2cbf-4f48-49bf-abd0-ab91996860e1", "address": "fa:16:3e:9a:ea:0c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabdd2cbf-4f", "ovs_interfaceid": "abdd2cbf-4f48-49bf-abd0-ab91996860e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.510689] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 970.589767] env[62974]: DEBUG nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 970.620366] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.620680] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.620789] env[62974]: DEBUG nova.network.neutron [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 970.763468] env[62974]: INFO nova.compute.manager [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Took 24.83 seconds to build instance. [ 970.808165] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702c6eb8-9423-41cc-959b-ffa4587268ea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.816973] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0671b0-2874-44a4-94b4-03df3238e64b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.855542] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4c7aaa-c071-4a8b-ac28-c98eb7902a1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.867157] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536c219e-c478-4aa7-90a2-804afa494cdb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.884044] env[62974]: DEBUG nova.compute.provider_tree [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.919728] env[62974]: DEBUG nova.network.neutron [-] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Updating instance_info_cache with network_info: [] {{(pid=62974) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.008850] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "refresh_cache-c79afcfb-25ce-4130-96d5-5148d968e5bd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.008850] env[62974]: DEBUG nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Instance network_info: |[{"id": "abdd2cbf-4f48-49bf-abd0-ab91996860e1", "address": "fa:16:3e:9a:ea:0c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabdd2cbf-4f", "ovs_interfaceid": "abdd2cbf-4f48-49bf-abd0-ab91996860e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 971.009403] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:ea:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abdd2cbf-4f48-49bf-abd0-ab91996860e1', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.020937] env[62974]: DEBUG oslo.service.loopingcall [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.021247] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 971.021475] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b7d4c25-e5fc-48a2-98c9-5bdc39c99972 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.042038] env[62974]: DEBUG nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 971.051046] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.051046] env[62974]: value = "task-2654821" [ 971.051046] env[62974]: _type = "Task" [ 971.051046] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.066595] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654821, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.077890] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.078079] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.078206] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.078391] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.078533] env[62974]: DEBUG nova.virt.hardware [None 
req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.078679] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.078881] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.079208] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 971.079293] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.079430] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.079628] env[62974]: DEBUG nova.virt.hardware [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.080594] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21adfdb0-db4c-43d8-93a7-74de4aade0d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.091420] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62d51b2-ee74-4b7d-8343-45c7df432bc9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.123126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.222223] env[62974]: DEBUG nova.compute.manager [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Received event 
network-changed-abdd2cbf-4f48-49bf-abd0-ab91996860e1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 971.222423] env[62974]: DEBUG nova.compute.manager [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Refreshing instance network info cache due to event network-changed-abdd2cbf-4f48-49bf-abd0-ab91996860e1. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 971.222642] env[62974]: DEBUG oslo_concurrency.lockutils [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] Acquiring lock "refresh_cache-c79afcfb-25ce-4130-96d5-5148d968e5bd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.222782] env[62974]: DEBUG oslo_concurrency.lockutils [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] Acquired lock "refresh_cache-c79afcfb-25ce-4130-96d5-5148d968e5bd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.222934] env[62974]: DEBUG nova.network.neutron [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Refreshing network info cache for port abdd2cbf-4f48-49bf-abd0-ab91996860e1 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.270444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6f889b49-8ad1-4213-92ab-56d880d52e80 tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.344s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.365023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "0f19241f-1650-41e5-8fe8-828024bf6aaa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.365023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.365023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "0f19241f-1650-41e5-8fe8-828024bf6aaa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.365023] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.365710] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.367115] env[62974]: INFO nova.compute.manager [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Terminating instance [ 971.395983] env[62974]: DEBUG nova.network.neutron [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.411169] env[62974]: ERROR nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [req-e3039f69-64f0-4c03-9921-0c75158d6d6b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
bd3bd9ae-180c-41cf-831e-3dd3892efa18. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e3039f69-64f0-4c03-9921-0c75158d6d6b"}]} [ 971.423127] env[62974]: INFO nova.compute.manager [-] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Took 1.75 seconds to deallocate network for instance. [ 971.442876] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 971.460911] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 971.461188] env[62974]: DEBUG nova.compute.provider_tree [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 971.479430] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 971.500201] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 971.572887] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654821, 'name': CreateVM_Task, 'duration_secs': 0.470901} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.575665] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.575818] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.576830] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 971.578061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.578276] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.578516] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 971.578760] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-398515c7-25b9-4489-b173-44bfc97d2eba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.584594] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 971.584594] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52afab90-e29f-fdf3-206c-2e156bdacd18" [ 971.584594] env[62974]: _type = "Task" [ 971.584594] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.595680] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52afab90-e29f-fdf3-206c-2e156bdacd18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.763516] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d3407c-fe97-4e51-a26d-eeab16959c2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.775779] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fb4b53-0399-4a3d-80c9-030c5bf976c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.810550] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590b021f-f05f-43eb-bd32-1628ed6773c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.820034] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c17a38-1af5-411c-9bde-b5a9a656c0de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.834055] env[62974]: DEBUG nova.compute.provider_tree [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 971.873568] env[62974]: DEBUG nova.compute.manager [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 971.874317] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 971.875292] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c04f60-1896-479b-8d8e-96d6207dc59c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.884959] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.885236] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ef1857b-abe4-4228-a9a8-a61c69628bfa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.897037] env[62974]: DEBUG oslo_vmware.api [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 971.897037] env[62974]: value = "task-2654822" [ 971.897037] env[62974]: _type = "Task" [ 971.897037] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.898555] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.908620] env[62974]: DEBUG oslo_vmware.api [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.933333] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.067813] env[62974]: DEBUG nova.network.neutron [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Updated VIF entry in instance network info cache for port abdd2cbf-4f48-49bf-abd0-ab91996860e1. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.067813] env[62974]: DEBUG nova.network.neutron [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Updating instance_info_cache with network_info: [{"id": "abdd2cbf-4f48-49bf-abd0-ab91996860e1", "address": "fa:16:3e:9a:ea:0c", "network": {"id": "90888e4a-6a30-47ea-bb16-b85a59d535ae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1264724209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39e59f58f7c24529bfce4bcc18cc7925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabdd2cbf-4f", "ovs_interfaceid": "abdd2cbf-4f48-49bf-abd0-ab91996860e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.080955] env[62974]: DEBUG nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 972.098628] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52afab90-e29f-fdf3-206c-2e156bdacd18, 'name': SearchDatastore_Task, 'duration_secs': 0.013131} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.099189] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.099959] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.100352] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.100621] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.100912] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.101307] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d7737ac-ff45-46f2-9931-f5ade20472be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.115418] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.115418] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 972.115418] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c3e8fbf-5c4c-4725-9044-f03e72085c0d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.125287] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 972.125287] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe47a7-4fa5-2eca-d363-006a3e1c3fc2" [ 972.125287] env[62974]: _type = "Task" [ 972.125287] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.139491] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe47a7-4fa5-2eca-d363-006a3e1c3fc2, 'name': SearchDatastore_Task, 'duration_secs': 0.010246} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.141206] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4caa1b96-3e4b-4567-9ac0-ce9fd13f27db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.150674] env[62974]: DEBUG nova.network.neutron [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Successfully updated port: cd9af61c-b640-4853-97d9-5989ce177f57 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.152902] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 972.152902] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a3d345-e7c1-c036-31c9-d624dd453c7b" [ 972.152902] env[62974]: _type = "Task" [ 972.152902] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.169014] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a3d345-e7c1-c036-31c9-d624dd453c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.010614} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.169469] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.169897] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c79afcfb-25ce-4130-96d5-5148d968e5bd/c79afcfb-25ce-4130-96d5-5148d968e5bd.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 972.170610] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-718f1f6f-4fd3-4f96-8311-4bd28827993e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.181307] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 972.181307] env[62974]: value = "task-2654823" [ 972.181307] env[62974]: _type = "Task" [ 972.181307] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.191726] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654823, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.369289] env[62974]: DEBUG nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 972.369676] env[62974]: DEBUG nova.compute.provider_tree [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 122 to 123 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 972.369867] env[62974]: DEBUG nova.compute.provider_tree [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.406073] env[62974]: DEBUG oslo_vmware.api [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654822, 'name': PowerOffVM_Task, 'duration_secs': 0.209608} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.406791] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.406791] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.406990] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-087e8fc2-3472-4856-9c59-c3e48e3590f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.429830] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b45ff42-49f4-44af-97a2-ef9dc916fb2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.454764] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917f2ac1-4597-479a-a51d-933739586b24 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.464846] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance '55229db9-9442-4973-a1f2-7762227167a4' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 972.518283] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.518524] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.518712] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleting the datastore file [datastore1] 0f19241f-1650-41e5-8fe8-828024bf6aaa {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.519040] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62da6ebe-5762-4bac-b84d-af483accb2d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.529597] env[62974]: DEBUG 
oslo_vmware.api [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for the task: (returnval){ [ 972.529597] env[62974]: value = "task-2654825" [ 972.529597] env[62974]: _type = "Task" [ 972.529597] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.539784] env[62974]: DEBUG oslo_vmware.api [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654825, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.570051] env[62974]: DEBUG oslo_concurrency.lockutils [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] Releasing lock "refresh_cache-c79afcfb-25ce-4130-96d5-5148d968e5bd" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.570303] env[62974]: DEBUG nova.compute.manager [req-c04c695f-d9e6-451a-97b2-dc18895e17b2 req-801c2754-6a46-4c1a-b9cd-ad74bfe75d19 service nova] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Received event network-vif-deleted-3b0107e6-4f52-40dc-90c3-d21197cbdf34 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 972.599998] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.654874] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-f586f9a6-1288-4aa2-9052-6e9eb74aac5f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.655041] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-f586f9a6-1288-4aa2-9052-6e9eb74aac5f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.655186] env[62974]: DEBUG nova.network.neutron [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.691799] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47591} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.692120] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c79afcfb-25ce-4130-96d5-5148d968e5bd/c79afcfb-25ce-4130-96d5-5148d968e5bd.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.692445] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.692806] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe40fde9-b2c0-4234-8e68-638092937c35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.699962] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 972.699962] env[62974]: value = "task-2654826" [ 972.699962] env[62974]: _type = "Task" [ 972.699962] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.708199] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654826, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.879210] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.872s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.881071] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.914s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.881308] env[62974]: DEBUG nova.objects.instance [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lazy-loading 'resources' on Instance uuid ef54d01a-5d2c-448a-a060-37520de396ca {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.898118] env[62974]: INFO nova.scheduler.client.report [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleted allocations for instance 69fb00b3-6a41-4ef5-8876-6548cae31c07 [ 972.972096] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fb518ea1-4e05-4f7d-afe2-166715b38e1e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance '55229db9-9442-4973-a1f2-7762227167a4' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 973.042221] env[62974]: DEBUG oslo_vmware.api [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Task: {'id': task-2654825, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240404} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.042650] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.042931] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.043185] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.043426] env[62974]: INFO nova.compute.manager [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Took 1.17 seconds to destroy the instance on the hypervisor. [ 973.043732] env[62974]: DEBUG oslo.service.loopingcall [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.043985] env[62974]: DEBUG nova.compute.manager [-] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 973.044149] env[62974]: DEBUG nova.network.neutron [-] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 973.198036] env[62974]: DEBUG nova.network.neutron [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.209908] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087146} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.214018] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 973.214018] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfa4889-ec2c-4aa5-8445-f05a8be6d93f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.234648] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] c79afcfb-25ce-4130-96d5-5148d968e5bd/c79afcfb-25ce-4130-96d5-5148d968e5bd.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.237075] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2ca4d98-7238-4749-932e-ae35661dc038 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.259598] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 973.259598] env[62974]: value = "task-2654827" [ 973.259598] env[62974]: _type = "Task" [ 973.259598] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.261544] env[62974]: DEBUG nova.compute.manager [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Received event network-vif-plugged-cd9af61c-b640-4853-97d9-5989ce177f57 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 973.261742] env[62974]: DEBUG oslo_concurrency.lockutils [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] Acquiring lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.261940] env[62974]: DEBUG oslo_concurrency.lockutils [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.263113] env[62974]: DEBUG oslo_concurrency.lockutils [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.263113] env[62974]: DEBUG nova.compute.manager [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] No waiting events found dispatching network-vif-plugged-cd9af61c-b640-4853-97d9-5989ce177f57 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 973.263113] env[62974]: WARNING nova.compute.manager [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Received unexpected event network-vif-plugged-cd9af61c-b640-4853-97d9-5989ce177f57 for instance with vm_state building and task_state spawning. [ 973.263113] env[62974]: DEBUG nova.compute.manager [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Received event network-changed-cd9af61c-b640-4853-97d9-5989ce177f57 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 973.263113] env[62974]: DEBUG nova.compute.manager [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Refreshing instance network info cache due to event network-changed-cd9af61c-b640-4853-97d9-5989ce177f57. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 973.263388] env[62974]: DEBUG oslo_concurrency.lockutils [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] Acquiring lock "refresh_cache-f586f9a6-1288-4aa2-9052-6e9eb74aac5f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.274704] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654827, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.410024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f98f4503-e0dc-4e6a-8edc-b45fd316d71b tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "69fb00b3-6a41-4ef5-8876-6548cae31c07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.005s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.431747] env[62974]: DEBUG nova.network.neutron [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Updating instance_info_cache with network_info: [{"id": "cd9af61c-b640-4853-97d9-5989ce177f57", "address": "fa:16:3e:26:75:64", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd9af61c-b6", "ovs_interfaceid": "cd9af61c-b640-4853-97d9-5989ce177f57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.651160] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e99d4b-795d-4be1-861e-f325f38e3bae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.661066] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8978841c-e25c-45f3-8e74-79e53ba3b666 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.691924] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954a383c-3734-45f3-b988-fcac508e48ec {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.700266] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a7465c-29ae-4442-aec9-9d31419e4bd2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.714242] env[62974]: DEBUG nova.compute.provider_tree [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.771889] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654827, 'name': ReconfigVM_Task, 'duration_secs': 0.47283} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.773020] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Reconfigured VM instance instance-0000005b to attach disk [datastore1] c79afcfb-25ce-4130-96d5-5148d968e5bd/c79afcfb-25ce-4130-96d5-5148d968e5bd.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.773020] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab6d158e-412f-4a6c-b28d-35ed68bc874b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.780046] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 973.780046] env[62974]: value = "task-2654828" [ 973.780046] env[62974]: _type = "Task" [ 973.780046] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.789074] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654828, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.894329] env[62974]: DEBUG nova.network.neutron [-] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.934688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-f586f9a6-1288-4aa2-9052-6e9eb74aac5f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.934688] env[62974]: DEBUG nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Instance network_info: |[{"id": "cd9af61c-b640-4853-97d9-5989ce177f57", "address": "fa:16:3e:26:75:64", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd9af61c-b6", "ovs_interfaceid": "cd9af61c-b640-4853-97d9-5989ce177f57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 973.934992] env[62974]: DEBUG oslo_concurrency.lockutils [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] Acquired lock "refresh_cache-f586f9a6-1288-4aa2-9052-6e9eb74aac5f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.935256] env[62974]: DEBUG nova.network.neutron [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Refreshing network info cache for port cd9af61c-b640-4853-97d9-5989ce177f57 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.936423] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:75:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd9af61c-b640-4853-97d9-5989ce177f57', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.943844] env[62974]: DEBUG oslo.service.loopingcall [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.946759] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.947731] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2008971f-2da8-43eb-a3b1-47622d8985e0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.969309] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.969309] env[62974]: value = "task-2654829" [ 973.969309] env[62974]: _type = "Task" [ 973.969309] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.977357] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654829, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.172663] env[62974]: DEBUG nova.network.neutron [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Updated VIF entry in instance network info cache for port cd9af61c-b640-4853-97d9-5989ce177f57. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.173037] env[62974]: DEBUG nova.network.neutron [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Updating instance_info_cache with network_info: [{"id": "cd9af61c-b640-4853-97d9-5989ce177f57", "address": "fa:16:3e:26:75:64", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd9af61c-b6", "ovs_interfaceid": "cd9af61c-b640-4853-97d9-5989ce177f57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.217587] env[62974]: DEBUG nova.scheduler.client.report [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] 
Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.290875] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654828, 'name': Rename_Task, 'duration_secs': 0.151429} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.291481] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.291481] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f90abd6-1e13-4451-a874-285d6d5c4479 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.298436] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 974.298436] env[62974]: value = "task-2654830" [ 974.298436] env[62974]: _type = "Task" [ 974.298436] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.307043] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654830, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.399307] env[62974]: INFO nova.compute.manager [-] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Took 1.36 seconds to deallocate network for instance. [ 974.480230] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654829, 'name': CreateVM_Task, 'duration_secs': 0.393167} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.480403] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.481090] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.481257] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.481592] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.481860] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-787baca8-cb37-4085-991a-1a47fb36d467 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.494807] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 974.494807] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52944d7c-1eab-fb5a-9f37-28a21cfd890a" [ 974.494807] env[62974]: _type = "Task" [ 974.494807] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.503935] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52944d7c-1eab-fb5a-9f37-28a21cfd890a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.675959] env[62974]: DEBUG oslo_concurrency.lockutils [req-5f32d7a1-8740-4652-b74b-1643b7e8936e req-fc1056d2-3685-42fa-beb7-a18d7edffdec service nova] Releasing lock "refresh_cache-f586f9a6-1288-4aa2-9052-6e9eb74aac5f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.722920] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.726951] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.723s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.727817] env[62974]: DEBUG nova.objects.instance [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lazy-loading 'resources' on Instance uuid 92c80524-0fb6-4f28-9a72-bc4ab5793558 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.755964] env[62974]: INFO nova.scheduler.client.report [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Deleted allocations for instance ef54d01a-5d2c-448a-a060-37520de396ca [ 974.811172] env[62974]: DEBUG oslo_vmware.api [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654830, 'name': PowerOnVM_Task, 'duration_secs': 0.507486} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.811350] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 974.811615] env[62974]: INFO nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Took 6.24 seconds to spawn the instance on the hypervisor. 
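Annotation: the entries above follow the vCenter task-polling pattern that recurs throughout this log. A method such as Folder.CreateVM_Task or VirtualMachine.PowerOnVM_Task is invoked through oslo_vmware.service, a task handle (e.g. task-2654829) is returned, and oslo_vmware.api polls it, logging "progress is N%" until the task "completed successfully". The sketch below is a minimal, self-contained illustration of that loop only; poll_task_progress and TaskFailed are hypothetical stand-ins, and the interval and timeout values are illustrative, not the actual oslo.vmware implementation.

import time

class TaskFailed(Exception):
    """Hypothetical error raised when a vCenter task ends in an error state."""

def wait_for_task(poll_task_progress, task_id, interval=0.5, timeout=300):
    # poll_task_progress(task_id) is assumed to return a (state, progress)
    # tuple such as ("running", 40), ("success", 100) or ("error", 0),
    # matching the "CreateVM_Task progress is 0%" lines in the log above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_task_progress(task_id)
        print(f"Task {task_id}: {state}, progress {progress}%")
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(f"Task {task_id} failed")
        time.sleep(interval)  # back off between polls, as the looping call does
    raise TaskFailed(f"Timed out waiting for task {task_id}")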
[ 974.812088] env[62974]: DEBUG nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.812929] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b48a12-f32f-43cb-9f4a-6b67b69b21b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.906549] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.006564] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52944d7c-1eab-fb5a-9f37-28a21cfd890a, 'name': SearchDatastore_Task, 'duration_secs': 0.010397} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.006886] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.007141] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.007411] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.007556] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.007734] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.007991] 
env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af422af9-e9cb-4443-92da-1a62a82697aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.020269] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.020269] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.020269] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f56935-0690-4f13-94ee-e3e6fc93e157 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.025722] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 975.025722] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5239db25-99c2-57f4-f35b-e63240543741" [ 975.025722] env[62974]: _type = "Task" [ 975.025722] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.034365] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5239db25-99c2-57f4-f35b-e63240543741, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.265497] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f59a108-cc95-4de4-99c0-278c147fd426 tempest-InstanceActionsTestJSON-1196309339 tempest-InstanceActionsTestJSON-1196309339-project-member] Lock "ef54d01a-5d2c-448a-a060-37520de396ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.419s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.305048] env[62974]: DEBUG nova.compute.manager [req-1027898b-350f-464d-bab9-dcd9cf68fcb8 req-c168d873-85a8-4ec6-9248-a72b1533b0b0 service nova] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Received event network-vif-deleted-920a4859-7d7c-4b5f-bc72-e4c088c41523 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 975.336490] env[62974]: INFO nova.compute.manager [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Took 17.71 seconds to build instance. 
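Annotation: the lock / SearchDatastore_Task / MakeDirectory sequence above is the image-cache path for instance f586f9a6: the worker serializes access to "[datastore2] devstack-image-cache_base/<image-id>", checks whether the cached VMDK already exists on the datastore, creates the cache directory if it is missing, and only then copies the disk into the instance folder. The sketch below shows that check-under-lock shape; vmdk_exists and fetch_image_to_cache are hypothetical helpers standing in for the nova.virt.vmwareapi internals, and only the lockutils.lock context manager is a real library call.

from oslo_concurrency import lockutils

def ensure_image_cached(ds_name, image_id, vmdk_exists, fetch_image_to_cache):
    """Make sure the cached VMDK for image_id exists before cloning it.

    vmdk_exists(path) and fetch_image_to_cache(path) are hypothetical helpers;
    the lock name mirrors the "[datastore2] devstack-image-cache_base/<id>"
    locks visible in the log above.
    """
    cache_path = (f"[{ds_name}] devstack-image-cache_base/"
                  f"{image_id}/{image_id}.vmdk")
    with lockutils.lock(f"[{ds_name}] devstack-image-cache_base/{image_id}"):
        if not vmdk_exists(cache_path):        # SearchDatastore_Task in the log
            fetch_image_to_cache(cache_path)   # download/convert only once
    return cache_path

# The caller would then copy cache_path to the per-instance folder
# (CopyVirtualDisk_Task) and extend the root disk to the flavor size
# (ExtendVirtualDisk_Task), as the later entries for f586f9a6 show.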
[ 975.355636] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "55229db9-9442-4973-a1f2-7762227167a4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.356416] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.356416] env[62974]: DEBUG nova.compute.manager [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Going to confirm migration 5 {{(pid=62974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 975.484631] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d69f539-1250-4afa-bf39-966f47bd3025 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.493183] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397068d2-e7c4-4d32-a7f4-7956b63d7dc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.526126] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17af34ae-5fe9-45c0-b9e6-3a837bbe6077 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.540403] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4b9d63-32ca-4c1c-ae19-eac9b04eaaed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.544163] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5239db25-99c2-57f4-f35b-e63240543741, 'name': SearchDatastore_Task, 'duration_secs': 0.012609} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.545365] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-742817fc-6b56-4eca-9573-d3a18856d189 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.555471] env[62974]: DEBUG nova.compute.provider_tree [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.560419] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 975.560419] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ba374d-cc58-ffb8-6e3f-0fa010982d16" [ 975.560419] env[62974]: _type = "Task" [ 975.560419] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.570180] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ba374d-cc58-ffb8-6e3f-0fa010982d16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.838345] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7de1376e-d2c7-42d9-983d-68770fef38dd tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.228s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.895637] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.896073] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.942109] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.942109] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.942109] env[62974]: DEBUG nova.network.neutron [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.942109] env[62974]: DEBUG nova.objects.instance [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'info_cache' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.065019] env[62974]: DEBUG nova.scheduler.client.report [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.076484] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ba374d-cc58-ffb8-6e3f-0fa010982d16, 'name': SearchDatastore_Task, 'duration_secs': 0.014068} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.076933] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.077343] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] f586f9a6-1288-4aa2-9052-6e9eb74aac5f/f586f9a6-1288-4aa2-9052-6e9eb74aac5f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.077769] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-079f0d8f-71bc-49e9-b3ee-f016378b2201 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.088024] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 976.088024] env[62974]: value = "task-2654831" [ 976.088024] env[62974]: _type = "Task" [ 976.088024] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.097687] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.399311] env[62974]: DEBUG nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 976.571563] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.845s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.577222] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.027s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.577752] env[62974]: DEBUG nova.objects.instance [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lazy-loading 'resources' on Instance uuid aa6eb55e-79c0-4e1f-8756-05dff97b06d2 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.601532] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494913} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.601817] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] f586f9a6-1288-4aa2-9052-6e9eb74aac5f/f586f9a6-1288-4aa2-9052-6e9eb74aac5f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.602124] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.603145] env[62974]: INFO nova.scheduler.client.report [None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Deleted allocations for instance 92c80524-0fb6-4f28-9a72-bc4ab5793558 [ 976.604112] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16f7f352-daf0-4e71-8184-ea3eb911fc4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.615628] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 976.615628] env[62974]: value = "task-2654832" [ 976.615628] env[62974]: _type = "Task" [ 976.615628] env[62974]: } 
to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.626901] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.676598] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "c79afcfb-25ce-4130-96d5-5148d968e5bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.677028] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.677321] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "c79afcfb-25ce-4130-96d5-5148d968e5bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.677554] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.677759] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.680853] env[62974]: INFO nova.compute.manager [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Terminating instance [ 976.918337] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.126659] env[62974]: DEBUG oslo_concurrency.lockutils 
[None req-8662d5bc-f139-4171-b146-f1fe3aa0811a tempest-ServerAddressesNegativeTestJSON-567629378 tempest-ServerAddressesNegativeTestJSON-567629378-project-member] Lock "92c80524-0fb6-4f28-9a72-bc4ab5793558" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.329s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.127417] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06734} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.127703] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.128861] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b99f43d-8720-4b56-8417-551dd1b933e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.156393] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] f586f9a6-1288-4aa2-9052-6e9eb74aac5f/f586f9a6-1288-4aa2-9052-6e9eb74aac5f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.163995] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ea1c4e9-022e-43ff-a713-6ce35e2b4e49 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.187409] env[62974]: DEBUG nova.compute.manager [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 977.187757] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.189133] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e550e1b4-19b5-4f4f-805b-8398005d0bf0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.196336] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 977.196336] env[62974]: value = "task-2654833" [ 977.196336] env[62974]: _type = "Task" [ 977.196336] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.203708] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 977.203880] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab64b3fe-e8ee-4fbd-8b93-88d28bdbcc4f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.213304] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654833, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.220235] env[62974]: DEBUG oslo_vmware.api [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 977.220235] env[62974]: value = "task-2654834" [ 977.220235] env[62974]: _type = "Task" [ 977.220235] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.230413] env[62974]: DEBUG oslo_vmware.api [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654834, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.313857] env[62974]: DEBUG nova.network.neutron [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.466472] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2979b5b0-d19c-484d-a005-a712166f1db5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.475710] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c02349a-435b-4977-b8b1-bb311fa75c1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.516220] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ade52e0-b2d2-4808-99c3-d31d3d4709fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.525511] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bd5234-2f9c-4e48-9144-d4e69ec3e110 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.540262] env[62974]: DEBUG nova.compute.provider_tree [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 977.708072] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654833, 'name': ReconfigVM_Task, 'duration_secs': 0.312175} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.708372] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Reconfigured VM instance instance-0000005c to attach disk [datastore2] f586f9a6-1288-4aa2-9052-6e9eb74aac5f/f586f9a6-1288-4aa2-9052-6e9eb74aac5f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.709083] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-823d5742-2ee7-4c2b-a28d-47f5e163dc24 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.716339] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 977.716339] env[62974]: value = "task-2654835" [ 977.716339] env[62974]: _type = "Task" [ 977.716339] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.726082] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654835, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.732050] env[62974]: DEBUG oslo_vmware.api [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654834, 'name': PowerOffVM_Task, 'duration_secs': 0.313659} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.732378] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 977.732544] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.732813] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ce4aa99-5dad-4fe3-ac2f-36428c3241fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.819983] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.820281] env[62974]: DEBUG nova.objects.instance [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'migration_context' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 978.067234] env[62974]: ERROR nova.scheduler.client.report [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] [req-a9f0455e-dbf2-47d0-b873-2e9bffad2627] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a9f0455e-dbf2-47d0-b873-2e9bffad2627"}]} [ 978.085639] env[62974]: DEBUG nova.scheduler.client.report [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 978.102982] env[62974]: DEBUG nova.scheduler.client.report [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 978.103261] env[62974]: DEBUG nova.compute.provider_tree [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.115751] env[62974]: DEBUG nova.scheduler.client.report [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 978.134048] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 978.134283] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 978.134461] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 
tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleting the datastore file [datastore1] c79afcfb-25ce-4130-96d5-5148d968e5bd {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.135069] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84410694-60a1-4d9a-8e57-2be9e67685a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.139647] env[62974]: DEBUG nova.scheduler.client.report [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 978.144339] env[62974]: DEBUG oslo_vmware.api [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for the task: (returnval){ [ 978.144339] env[62974]: value = "task-2654837" [ 978.144339] env[62974]: _type = "Task" [ 978.144339] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.153450] env[62974]: DEBUG oslo_vmware.api [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.228964] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654835, 'name': Rename_Task, 'duration_secs': 0.303815} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.231494] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.233735] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d3eb201-c426-4073-9b2b-42909ea67833 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.239642] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 978.239642] env[62974]: value = "task-2654838" [ 978.239642] env[62974]: _type = "Task" [ 978.239642] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.250108] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.323651] env[62974]: DEBUG nova.objects.base [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Object Instance<55229db9-9442-4973-a1f2-7762227167a4> lazy-loaded attributes: info_cache,migration_context {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 978.324596] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b62423b-7194-4b93-8d93-9ae77162de93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.354877] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67598af5-cb6b-4602-8682-b16e027339ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.360885] env[62974]: DEBUG oslo_vmware.api [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 978.360885] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522b8a25-d769-e0f7-d687-bfab2aa6fbcc" [ 978.360885] env[62974]: _type = "Task" [ 978.360885] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.369959] env[62974]: DEBUG oslo_vmware.api [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522b8a25-d769-e0f7-d687-bfab2aa6fbcc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.382729] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90023c3-d6bf-4852-89fe-f264dedccbb8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.390421] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e186de5-61af-4f08-8b0b-ad0ee835bcd8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.420835] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a862278-fd03-4544-8821-8bdca3abac2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.429104] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df5f90b-7d86-42db-855b-94daf551bbde {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.452724] env[62974]: DEBUG nova.compute.provider_tree [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.659359] env[62974]: DEBUG oslo_vmware.api [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Task: {'id': task-2654837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192094} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.659686] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.660471] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.660471] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.660471] env[62974]: INFO nova.compute.manager [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Took 1.47 seconds to destroy the instance on the hypervisor. 
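Annotation: the ERROR at 978.067234 and the refresh that follows show Placement's optimistic concurrency control. Each inventory update carries the resource provider generation; a concurrent writer bumps it, the stale write is rejected with 409 "placement.concurrent_update", and the report client re-reads the provider before retrying (which is why the next attempt logs "Inventory has not changed"). The loop below is a schematic of that retry pattern only; get_provider_generation and put_inventories are hypothetical callables wrapping the Placement HTTP calls, not the actual nova.scheduler.client.report code.

def set_inventory_with_retry(get_provider_generation, put_inventories,
                             provider_uuid, inventories, max_attempts=4):
    # Mirrors the pattern in the log: send the update with the last known
    # generation, and on a generation conflict refresh the generation and
    # retry with the same inventory payload.
    for _attempt in range(max_attempts):
        generation = get_provider_generation(provider_uuid)
        ok, conflict = put_inventories(provider_uuid, generation, inventories)
        if ok:
            return True
        if not conflict:   # any failure other than a generation conflict
            raise RuntimeError("inventory update failed")
        # 409: another writer updated the provider; loop and re-read generation
    return False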
[ 978.660471] env[62974]: DEBUG oslo.service.loopingcall [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.660724] env[62974]: DEBUG nova.compute.manager [-] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 978.660813] env[62974]: DEBUG nova.network.neutron [-] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 978.750432] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654838, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.876624] env[62974]: DEBUG oslo_vmware.api [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522b8a25-d769-e0f7-d687-bfab2aa6fbcc, 'name': SearchDatastore_Task, 'duration_secs': 0.008345} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.876929] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.956371] env[62974]: DEBUG nova.scheduler.client.report [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.975834] env[62974]: DEBUG nova.compute.manager [req-3530fb7c-f8e5-40f7-a035-c8824d670b20 req-4c7b15ab-9a6b-4ed6-bd44-5df4a9386303 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Received event network-vif-deleted-abdd2cbf-4f48-49bf-abd0-ab91996860e1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 978.975834] env[62974]: INFO nova.compute.manager [req-3530fb7c-f8e5-40f7-a035-c8824d670b20 req-4c7b15ab-9a6b-4ed6-bd44-5df4a9386303 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Neutron deleted interface abdd2cbf-4f48-49bf-abd0-ab91996860e1; detaching it from the instance 
and deleting it from the info cache [ 978.975834] env[62974]: DEBUG nova.network.neutron [req-3530fb7c-f8e5-40f7-a035-c8824d670b20 req-4c7b15ab-9a6b-4ed6-bd44-5df4a9386303 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.251346] env[62974]: DEBUG oslo_vmware.api [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654838, 'name': PowerOnVM_Task, 'duration_secs': 0.705594} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.251624] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.251893] env[62974]: INFO nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Took 8.21 seconds to spawn the instance on the hypervisor. [ 979.252029] env[62974]: DEBUG nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.252785] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccbc2e1-a164-4ec0-a7b1-2d376a9f4fb0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.452227] env[62974]: DEBUG nova.network.neutron [-] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.460735] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.884s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.464292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.422s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.464292] env[62974]: DEBUG nova.objects.instance [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'resources' on Instance uuid 3df97cea-5a6e-4d7a-b2f3-e02213816e24 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.477866] env[62974]: 
DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f24c88e7-a688-4ba6-9436-9f892685b359 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.489759] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f060550b-8ef1-4fb0-823a-ea7dc64a865b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.500807] env[62974]: INFO nova.scheduler.client.report [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Deleted allocations for instance aa6eb55e-79c0-4e1f-8756-05dff97b06d2 [ 979.525833] env[62974]: DEBUG nova.compute.manager [req-3530fb7c-f8e5-40f7-a035-c8824d670b20 req-4c7b15ab-9a6b-4ed6-bd44-5df4a9386303 service nova] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Detach interface failed, port_id=abdd2cbf-4f48-49bf-abd0-ab91996860e1, reason: Instance c79afcfb-25ce-4130-96d5-5148d968e5bd could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 979.770991] env[62974]: INFO nova.compute.manager [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Took 20.43 seconds to build instance. [ 979.954427] env[62974]: INFO nova.compute.manager [-] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Took 1.29 seconds to deallocate network for instance. [ 980.009494] env[62974]: DEBUG oslo_concurrency.lockutils [None req-90b2c90b-b5d0-4800-bade-669a326c7001 tempest-ServerMetadataNegativeTestJSON-1171032697 tempest-ServerMetadataNegativeTestJSON-1171032697-project-member] Lock "aa6eb55e-79c0-4e1f-8756-05dff97b06d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.909s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.186426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d501de6c-4d67-4c51-b461-3c8789c18c9d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.196114] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23bd77d-0f81-4027-b5b4-d272353d72c2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.230410] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c969d36-64b5-4f02-829e-98a64a5f9cb3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.239087] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea76505-2140-40d1-beea-a4f69da044c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.253634] env[62974]: DEBUG nova.compute.provider_tree [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 
{{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.272797] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5b370c21-fed0-4a7d-b8fa-364eb689f10d tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.941s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.358485] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8d83ff-e274-4708-a2c8-a696195668d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.365987] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc38447-ce8b-4cff-8e63-dc128993fb7b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Suspending the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 980.366270] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-1d052547-6e4f-4255-a451-986d5162ad7c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.372688] env[62974]: DEBUG oslo_vmware.api [None req-5bc38447-ce8b-4cff-8e63-dc128993fb7b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 980.372688] env[62974]: value = "task-2654839" [ 980.372688] env[62974]: _type = "Task" [ 980.372688] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.382160] env[62974]: DEBUG oslo_vmware.api [None req-5bc38447-ce8b-4cff-8e63-dc128993fb7b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654839, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.461130] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.756915] env[62974]: DEBUG nova.scheduler.client.report [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 980.885543] env[62974]: DEBUG oslo_vmware.api [None req-5bc38447-ce8b-4cff-8e63-dc128993fb7b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654839, 'name': SuspendVM_Task} progress is 37%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.265538] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.801s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.267894] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.403s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.270313] env[62974]: INFO nova.compute.claims [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.289037] env[62974]: INFO nova.scheduler.client.report [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted allocations for instance 3df97cea-5a6e-4d7a-b2f3-e02213816e24 [ 981.383477] env[62974]: DEBUG oslo_vmware.api [None req-5bc38447-ce8b-4cff-8e63-dc128993fb7b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654839, 'name': SuspendVM_Task, 'duration_secs': 0.750767} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.383755] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5bc38447-ce8b-4cff-8e63-dc128993fb7b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Suspended the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 981.383937] env[62974]: DEBUG nova.compute.manager [None req-5bc38447-ce8b-4cff-8e63-dc128993fb7b tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.384716] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec867e3-19f8-436b-9627-41795705ad50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.798810] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d1687cb-cee6-4ec4-8cec-4bb2026e6fdf tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "3df97cea-5a6e-4d7a-b2f3-e02213816e24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.443s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.517183] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf9c1f4-1fd4-4319-9aec-83ddfb94eba2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.525827] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975abf10-f55d-4a58-ac33-73dcc3aee01e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.559454] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72ba030-e0aa-4f7e-aa09-17746b53d6e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.568576] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c05270d-f606-4013-beb8-60d4cf832859 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.583668] env[62974]: DEBUG nova.compute.provider_tree [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.754476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.754476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 
tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.754476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.754476] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.754714] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.755768] env[62974]: INFO nova.compute.manager [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Terminating instance [ 983.088219] env[62974]: DEBUG nova.scheduler.client.report [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.260209] env[62974]: DEBUG nova.compute.manager [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 983.260437] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.261600] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba796574-294f-4d10-bd70-fb9691ab9310 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.269905] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.270166] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-980a9ae1-8d6a-4461-812a-5baaf60e74a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.593278] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.593814] env[62974]: DEBUG nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 983.596827] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.474s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.598830] env[62974]: INFO nova.compute.claims [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.755364] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.755906] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.110383] env[62974]: DEBUG nova.compute.utils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 984.111881] env[62974]: DEBUG nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 984.112067] env[62974]: DEBUG nova.network.neutron [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 984.176166] env[62974]: DEBUG nova.policy [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33f0be227ad54df8a81c8412b6181092', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e215ba9f69f44945b300d9750e0f34aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 984.257729] env[62974]: DEBUG nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 984.481125] env[62974]: DEBUG nova.network.neutron [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Successfully created port: ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 984.615640] env[62974]: DEBUG nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 984.748347] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "6d6331f3-327a-4f11-973e-37c1a3d9701c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.748521] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.779732] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.854776] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e21515-69b6-4f49-8b5c-b060bb947ad5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.863565] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7979201d-ea6b-4fff-814a-b6677ca49774 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.894244] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486bb6f5-6b9f-45aa-bde7-2c828412e15f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.902146] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fac39c-f3b1-425e-bec4-d2fc78825021 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.915576] env[62974]: DEBUG nova.compute.provider_tree [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.254647] env[62974]: DEBUG nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 985.418896] env[62974]: DEBUG nova.scheduler.client.report [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 985.443020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.443020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.443020] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleting the datastore file [datastore2] f586f9a6-1288-4aa2-9052-6e9eb74aac5f {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.443020] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-148eb863-eaa8-4773-97bd-6e4c992a9943 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.449789] env[62974]: DEBUG oslo_vmware.api [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 985.449789] env[62974]: value = "task-2654841" [ 985.449789] env[62974]: _type = "Task" [ 985.449789] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.462745] env[62974]: DEBUG oslo_vmware.api [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.628471] env[62974]: DEBUG nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 985.654174] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 985.654481] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 985.654658] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 985.654845] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 985.654990] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 985.655166] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 985.655382] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 985.655540] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 985.655702] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] 
Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 985.655862] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 985.656043] env[62974]: DEBUG nova.virt.hardware [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 985.656903] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8db875e-bffa-48a0-a834-2d937cb242d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.665174] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab191961-9088-41b7-8a16-08e060e8a034 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.775879] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.849200] env[62974]: DEBUG nova.compute.manager [req-ec2351b5-d94f-4339-9ace-3e8d4b6ef046 req-8847b995-ecae-4f6f-b15d-abf0882c939b service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Received event network-vif-plugged-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 985.849475] env[62974]: DEBUG oslo_concurrency.lockutils [req-ec2351b5-d94f-4339-9ace-3e8d4b6ef046 req-8847b995-ecae-4f6f-b15d-abf0882c939b service nova] Acquiring lock "7163e48f-8344-4837-bbfd-cbb5741eee5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.849696] env[62974]: DEBUG oslo_concurrency.lockutils [req-ec2351b5-d94f-4339-9ace-3e8d4b6ef046 req-8847b995-ecae-4f6f-b15d-abf0882c939b service nova] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.849867] env[62974]: DEBUG oslo_concurrency.lockutils [req-ec2351b5-d94f-4339-9ace-3e8d4b6ef046 req-8847b995-ecae-4f6f-b15d-abf0882c939b service nova] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.850123] env[62974]: DEBUG nova.compute.manager [req-ec2351b5-d94f-4339-9ace-3e8d4b6ef046 req-8847b995-ecae-4f6f-b15d-abf0882c939b service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] No waiting events found dispatching 
network-vif-plugged-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 985.850347] env[62974]: WARNING nova.compute.manager [req-ec2351b5-d94f-4339-9ace-3e8d4b6ef046 req-8847b995-ecae-4f6f-b15d-abf0882c939b service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Received unexpected event network-vif-plugged-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 for instance with vm_state building and task_state spawning. [ 985.928848] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.928848] env[62974]: DEBUG nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 985.931504] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.999s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.931805] env[62974]: DEBUG nova.objects.instance [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lazy-loading 'resources' on Instance uuid c002aec9-4fdf-45c9-9ef6-d196c4891e19 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.935941] env[62974]: DEBUG nova.network.neutron [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Successfully updated port: ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 985.963016] env[62974]: DEBUG oslo_vmware.api [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16476} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.963981] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.964076] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.964217] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.964394] env[62974]: INFO nova.compute.manager [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Took 2.70 seconds to destroy the instance on the hypervisor. [ 985.965206] env[62974]: DEBUG oslo.service.loopingcall [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.965206] env[62974]: DEBUG nova.compute.manager [-] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 985.965206] env[62974]: DEBUG nova.network.neutron [-] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 986.437190] env[62974]: DEBUG nova.compute.utils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 986.438901] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquiring lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.439052] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquired lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.439194] env[62974]: DEBUG nova.network.neutron [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 
7163e48f-8344-4837-bbfd-cbb5741eee5d] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.440208] env[62974]: DEBUG nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 986.440374] env[62974]: DEBUG nova.network.neutron [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 986.484919] env[62974]: DEBUG nova.policy [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b837770f3f74a5fad99c7cc150e9cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '567f64e735384503b6c0172050bdfaf5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 986.689058] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88471367-e3c9-430a-abe9-67cef08504f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.701091] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f583e640-f115-4bc0-a776-33073da96bc5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.734960] env[62974]: DEBUG nova.network.neutron [-] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.736116] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa30ebfb-6208-44f0-a4ba-c6b5c183382f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.746235] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ec48d8-e162-4a01-9ce2-9c81c2d75c22 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.762725] env[62974]: DEBUG nova.compute.provider_tree [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.789318] env[62974]: DEBUG nova.network.neutron [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 
tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Successfully created port: a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 986.944701] env[62974]: DEBUG nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 986.994591] env[62974]: DEBUG nova.network.neutron [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.172034] env[62974]: DEBUG nova.network.neutron [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Updating instance_info_cache with network_info: [{"id": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "address": "fa:16:3e:7a:c7:fe", "network": {"id": "c62dd5c1-903c-40ff-88c8-358ffff86c68", "bridge": "br-int", "label": "tempest-ServersTestJSON-1709706442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e215ba9f69f44945b300d9750e0f34aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad0d2fde-02", "ovs_interfaceid": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.239486] env[62974]: INFO nova.compute.manager [-] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Took 1.27 seconds to deallocate network for instance. 
[ 987.266282] env[62974]: DEBUG nova.scheduler.client.report [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.674094] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Releasing lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.674439] env[62974]: DEBUG nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Instance network_info: |[{"id": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "address": "fa:16:3e:7a:c7:fe", "network": {"id": "c62dd5c1-903c-40ff-88c8-358ffff86c68", "bridge": "br-int", "label": "tempest-ServersTestJSON-1709706442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e215ba9f69f44945b300d9750e0f34aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad0d2fde-02", "ovs_interfaceid": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 987.674901] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:c7:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 987.682459] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Creating folder: Project (e215ba9f69f44945b300d9750e0f34aa). 
Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 987.682850] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.683084] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.683284] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.683464] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.683624] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.685116] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-397ed312-7b30-4248-9f6d-4bc6c8ed2dff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.687198] env[62974]: INFO nova.compute.manager [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Terminating instance [ 987.699224] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Created folder: Project (e215ba9f69f44945b300d9750e0f34aa) in parent group-v535199. [ 987.699591] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Creating folder: Instances. Parent ref: group-v535459. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 987.700218] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-969b2612-192b-4d98-8717-05a464836663 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.711149] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Created folder: Instances in parent group-v535459. [ 987.711414] env[62974]: DEBUG oslo.service.loopingcall [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 987.711599] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 987.711797] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2c655ac-6d81-4a57-8512-8b8fbc50d040 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.733719] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 987.733719] env[62974]: value = "task-2654844" [ 987.733719] env[62974]: _type = "Task" [ 987.733719] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.741595] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654844, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.745874] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.771200] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.773923] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.174s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.775442] env[62974]: INFO nova.compute.claims [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.794333] env[62974]: INFO nova.scheduler.client.report [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleted allocations for instance c002aec9-4fdf-45c9-9ef6-d196c4891e19 [ 987.879664] env[62974]: DEBUG nova.compute.manager [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Received event network-changed-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 987.879877] env[62974]: DEBUG nova.compute.manager [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Refreshing instance network info cache due to event network-changed-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 987.880127] env[62974]: DEBUG oslo_concurrency.lockutils [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] Acquiring lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.880292] env[62974]: DEBUG oslo_concurrency.lockutils [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] Acquired lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.880465] env[62974]: DEBUG nova.network.neutron [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Refreshing network info cache for port ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.959516] env[62974]: DEBUG nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 987.988635] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 987.988883] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.989072] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.989271] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.989430] env[62974]: DEBUG nova.virt.hardware [None 
req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.989596] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 987.989798] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 987.989952] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 987.990137] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 987.990299] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 987.990497] env[62974]: DEBUG nova.virt.hardware [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 987.991495] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ed8e4d-4809-4310-917c-b29708c6c258 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.000087] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2eceda-b7c2-49f2-8afb-3b346b63416e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.194498] env[62974]: DEBUG nova.compute.manager [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 988.194498] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.195450] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ab28f2-648f-42fe-b45a-125d34dbee17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.203941] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.204207] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ae0ad8b-811f-4de3-b303-bd464bdb20d8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.210976] env[62974]: DEBUG oslo_vmware.api [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 988.210976] env[62974]: value = "task-2654845" [ 988.210976] env[62974]: _type = "Task" [ 988.210976] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.220024] env[62974]: DEBUG oslo_vmware.api [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.243519] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654844, 'name': CreateVM_Task, 'duration_secs': 0.34818} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.243695] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.244399] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.244569] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.244897] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 988.245159] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f5c3eb8-ff04-4cf4-9b31-2648f95d8f93 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.249945] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 988.249945] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ff5d6b-579f-ca2f-4345-35cf54c0e498" [ 988.249945] env[62974]: _type = "Task" [ 988.249945] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.258393] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ff5d6b-579f-ca2f-4345-35cf54c0e498, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.302257] env[62974]: DEBUG oslo_concurrency.lockutils [None req-44096baa-431d-47c9-aec3-e964dbf8cf25 tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "c002aec9-4fdf-45c9-9ef6-d196c4891e19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.277s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.353488] env[62974]: DEBUG nova.network.neutron [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Successfully updated port: a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.556456] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "3426d512-d54e-4852-8eca-8ba9f5fef418" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.556706] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.556909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "3426d512-d54e-4852-8eca-8ba9f5fef418-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.557098] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.557266] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.559336] env[62974]: INFO nova.compute.manager [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 
3426d512-d54e-4852-8eca-8ba9f5fef418] Terminating instance [ 988.577785] env[62974]: DEBUG nova.network.neutron [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Updated VIF entry in instance network info cache for port ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.578120] env[62974]: DEBUG nova.network.neutron [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Updating instance_info_cache with network_info: [{"id": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "address": "fa:16:3e:7a:c7:fe", "network": {"id": "c62dd5c1-903c-40ff-88c8-358ffff86c68", "bridge": "br-int", "label": "tempest-ServersTestJSON-1709706442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e215ba9f69f44945b300d9750e0f34aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad0d2fde-02", "ovs_interfaceid": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.721677] env[62974]: DEBUG oslo_vmware.api [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654845, 'name': PowerOffVM_Task, 'duration_secs': 0.211377} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.722018] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.722148] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.722641] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38329858-588f-410e-9893-ee8b3a0b6796 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.760082] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ff5d6b-579f-ca2f-4345-35cf54c0e498, 'name': SearchDatastore_Task, 'duration_secs': 0.010071} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.760326] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.760596] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.760837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.760978] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.761173] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.761435] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-a2702a4d-d7c3-427a-bb53-d1513318702f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.770924] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.771176] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.772048] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc799a9e-c0e8-4dc3-b6c0-4abe0768d75b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.778260] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 988.778260] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b21d66-f581-5197-916f-8084803d2f47" [ 988.778260] env[62974]: _type = "Task" [ 988.778260] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.789806] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b21d66-f581-5197-916f-8084803d2f47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.805388] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.805685] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.805890] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Deleting the datastore file [datastore1] c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.806366] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c632edf8-8155-45d0-959e-8dd394415a13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.813930] env[62974]: DEBUG oslo_vmware.api [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 988.813930] env[62974]: value = "task-2654847" [ 988.813930] env[62974]: _type = "Task" [ 988.813930] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.822326] env[62974]: DEBUG oslo_vmware.api [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654847, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.859514] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.859514] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.859514] env[62974]: DEBUG nova.network.neutron [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.992076] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f32383f-4a61-4307-a86e-d657fd4c318b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.999838] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e484f8-1a6e-40d2-9229-7e92f3aebe7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.031581] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f001c0-dcfb-4cee-91e0-ffaef567561a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.039614] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679964f8-23d1-4439-8dd7-c5e337a55a52 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.054260] env[62974]: DEBUG nova.compute.provider_tree [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.063164] env[62974]: DEBUG nova.compute.manager [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 989.063371] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.064123] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74d0d4c-35f0-47fd-8c6b-1f1fa7b070d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.071304] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.071560] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef71b4d8-dd01-4320-980c-036563894048 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.077354] env[62974]: DEBUG oslo_vmware.api [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 989.077354] env[62974]: value = "task-2654848" [ 989.077354] env[62974]: _type = "Task" [ 989.077354] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.080546] env[62974]: DEBUG oslo_concurrency.lockutils [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] Releasing lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.080773] env[62974]: DEBUG nova.compute.manager [req-581246d7-27c8-490c-9a9d-0aec83101039 req-fc517bcc-7c4b-481b-b994-61287ba81b60 service nova] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Received event network-vif-deleted-cd9af61c-b640-4853-97d9-5989ce177f57 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 989.290090] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b21d66-f581-5197-916f-8084803d2f47, 'name': SearchDatastore_Task, 'duration_secs': 0.009824} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.290899] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6017871-3f00-4bcc-8556-26b97f5c2f17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.296598] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 989.296598] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dccfec-a8f2-058c-07d9-4754f8ba1784" [ 989.296598] env[62974]: _type = "Task" [ 989.296598] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.304952] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dccfec-a8f2-058c-07d9-4754f8ba1784, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.323022] env[62974]: DEBUG oslo_vmware.api [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143014} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.323022] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.323022] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.323022] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.323238] env[62974]: INFO nova.compute.manager [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 989.323343] env[62974]: DEBUG oslo.service.loopingcall [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.323604] env[62974]: DEBUG nova.compute.manager [-] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.323707] env[62974]: DEBUG nova.network.neutron [-] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.400877] env[62974]: DEBUG nova.network.neutron [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.557790] env[62974]: DEBUG nova.scheduler.client.report [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.589708] env[62974]: DEBUG oslo_vmware.api [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654848, 'name': PowerOffVM_Task, 'duration_secs': 0.206707} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.589977] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.590355] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.590860] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b59b246-91bd-4ac6-b038-83638ee7313d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.606858] env[62974]: DEBUG nova.network.neutron [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Updating instance_info_cache with network_info: [{"id": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "address": "fa:16:3e:61:55:4e", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e1899a-69", "ovs_interfaceid": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.706023] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.706162] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.706346] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleting the datastore file [datastore1] 3426d512-d54e-4852-8eca-8ba9f5fef418 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.706619] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-310e6923-496d-47c1-b445-293574ec3922 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.716559] env[62974]: DEBUG oslo_vmware.api [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for the task: (returnval){ [ 989.716559] env[62974]: value = "task-2654850" [ 989.716559] env[62974]: _type = "Task" [ 989.716559] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.727486] env[62974]: DEBUG oslo_vmware.api [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654850, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.808782] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dccfec-a8f2-058c-07d9-4754f8ba1784, 'name': SearchDatastore_Task, 'duration_secs': 0.009497} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.809417] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.809818] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 7163e48f-8344-4837-bbfd-cbb5741eee5d/7163e48f-8344-4837-bbfd-cbb5741eee5d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.810478] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f137b283-bd8b-4022-b17a-2c1d0dd88d41 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.818746] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 989.818746] env[62974]: value = "task-2654851" [ 989.818746] env[62974]: _type = "Task" [ 989.818746] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.828605] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.908507] env[62974]: DEBUG nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Received event network-vif-plugged-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 989.908743] env[62974]: DEBUG oslo_concurrency.lockutils [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] Acquiring lock "dca952df-dac9-4502-948b-24ac6fb939f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.908947] env[62974]: DEBUG oslo_concurrency.lockutils [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] Lock "dca952df-dac9-4502-948b-24ac6fb939f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.909667] env[62974]: DEBUG oslo_concurrency.lockutils [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] Lock "dca952df-dac9-4502-948b-24ac6fb939f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.909667] env[62974]: DEBUG nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] No waiting events found dispatching network-vif-plugged-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 989.910263] env[62974]: WARNING nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Received unexpected event network-vif-plugged-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac for instance with vm_state building and task_state spawning. [ 989.910547] env[62974]: DEBUG nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Received event network-changed-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 989.910751] env[62974]: DEBUG nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Refreshing instance network info cache due to event network-changed-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 989.910955] env[62974]: DEBUG oslo_concurrency.lockutils [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] Acquiring lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.066024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.066024] env[62974]: DEBUG nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 990.069061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.163s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.069603] env[62974]: DEBUG nova.objects.instance [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lazy-loading 'resources' on Instance uuid 0f19241f-1650-41e5-8fe8-828024bf6aaa {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.108056] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.108757] env[62974]: DEBUG nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Instance network_info: |[{"id": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "address": "fa:16:3e:61:55:4e", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", 
"segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e1899a-69", "ovs_interfaceid": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 990.109303] env[62974]: DEBUG oslo_concurrency.lockutils [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] Acquired lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.111023] env[62974]: DEBUG nova.network.neutron [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Refreshing network info cache for port a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.111228] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:55:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6e1899a-69c5-486d-bfb2-a2f12c06e8ac', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 990.120022] env[62974]: DEBUG oslo.service.loopingcall [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.121593] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 990.121956] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43c99e65-3d59-4546-9d81-c040b6a41b14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.145679] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 990.145679] env[62974]: value = "task-2654852" [ 990.145679] env[62974]: _type = "Task" [ 990.145679] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.155762] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654852, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.230492] env[62974]: DEBUG oslo_vmware.api [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Task: {'id': task-2654850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140988} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.230871] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.231119] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.231353] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.231561] env[62974]: INFO nova.compute.manager [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Took 1.17 seconds to destroy the instance on the hypervisor. [ 990.231881] env[62974]: DEBUG oslo.service.loopingcall [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.232123] env[62974]: DEBUG nova.compute.manager [-] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 990.232246] env[62974]: DEBUG nova.network.neutron [-] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 990.331191] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457699} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.331446] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 7163e48f-8344-4837-bbfd-cbb5741eee5d/7163e48f-8344-4837-bbfd-cbb5741eee5d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.331654] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.331994] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-391574e0-8656-47af-9de3-43d95b0b9159 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.340052] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 990.340052] env[62974]: value = "task-2654853" [ 990.340052] env[62974]: _type = "Task" [ 990.340052] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.349464] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654853, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.379311] env[62974]: DEBUG nova.network.neutron [-] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.572996] env[62974]: DEBUG nova.compute.utils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.577549] env[62974]: DEBUG nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 990.577549] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.623848] env[62974]: DEBUG nova.policy [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8a9d68e68144844ad4b6c02916f3e9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6453d2c53e34f6da5e0bf34d846e663', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 990.657919] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654852, 'name': CreateVM_Task, 'duration_secs': 0.472271} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.660442] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.661255] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.661414] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.661913] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 990.663079] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f166bb7-0722-4fb6-adda-c54a849d8f78 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.669871] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 990.669871] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e14da2-940e-f00c-3fc3-5450e722def8" [ 990.669871] env[62974]: _type = "Task" [ 990.669871] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.681237] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e14da2-940e-f00c-3fc3-5450e722def8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.838337] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fe808b-021e-48f2-a7d3-88f9a9c02b5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.853014] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd2d8c0-b43d-4e89-ab55-d0f3f37697b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.856973] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073104} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.857882] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.862582] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c2f091-cc78-4b4d-9229-0ceae3a69382 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.895353] env[62974]: INFO nova.compute.manager [-] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Took 1.57 seconds to deallocate network for instance. 
[ 990.900721] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4f0937-3c8b-4c27-90dd-500caed98230 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.926223] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 7163e48f-8344-4837-bbfd-cbb5741eee5d/7163e48f-8344-4837-bbfd-cbb5741eee5d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.927327] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c42ef58-884e-42a6-939c-18c9d154c76a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.946149] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31e773b-f8d5-4201-a5d0-8fbf204f787c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.952864] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 990.952864] env[62974]: value = "task-2654854" [ 990.952864] env[62974]: _type = "Task" [ 990.952864] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.965279] env[62974]: DEBUG nova.compute.provider_tree [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.967801] env[62974]: DEBUG nova.network.neutron [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Updated VIF entry in instance network info cache for port a6e1899a-69c5-486d-bfb2-a2f12c06e8ac. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.968258] env[62974]: DEBUG nova.network.neutron [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Updating instance_info_cache with network_info: [{"id": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "address": "fa:16:3e:61:55:4e", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e1899a-69", "ovs_interfaceid": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.973455] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654854, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.986514] env[62974]: DEBUG nova.network.neutron [-] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.053934] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Successfully created port: 40646b9d-b80e-40c3-9130-dcb5916cb108 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.077450] env[62974]: DEBUG nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 991.181877] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e14da2-940e-f00c-3fc3-5450e722def8, 'name': SearchDatastore_Task, 'duration_secs': 0.079764} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.182230] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.182548] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.182820] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.183021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.183157] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.183415] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89a999c2-df83-45e9-aefc-d45ad627a7a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.193595] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.193785] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.194535] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a18ee714-5cf9-4fa5-91d4-0fe5729b56f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.201364] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 991.201364] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5263e462-ce52-e77c-7e23-7aeaada8fba1" [ 991.201364] env[62974]: _type = "Task" [ 991.201364] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.210399] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5263e462-ce52-e77c-7e23-7aeaada8fba1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.395458] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Successfully created port: f00dab5c-4be5-45af-a966-24a2317d5c0c {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.428601] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.463299] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654854, 'name': ReconfigVM_Task, 'duration_secs': 0.294244} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.463573] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 7163e48f-8344-4837-bbfd-cbb5741eee5d/7163e48f-8344-4837-bbfd-cbb5741eee5d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.465916] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba2f6724-745e-4f4e-adb9-0c796dbd2362 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.471587] env[62974]: DEBUG nova.scheduler.client.report [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.477252] env[62974]: DEBUG oslo_concurrency.lockutils [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] Releasing lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.477252] env[62974]: DEBUG nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Received event network-vif-deleted-5ef50dc0-edb6-41e4-b27b-22e996c326b4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 991.477453] env[62974]: INFO nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Neutron deleted interface 5ef50dc0-edb6-41e4-b27b-22e996c326b4; detaching it from the instance and deleting it from the info cache [ 991.477629] env[62974]: DEBUG nova.network.neutron [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.479677] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 991.479677] env[62974]: value = "task-2654855" [ 991.479677] env[62974]: _type = "Task" [ 991.479677] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.490443] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654855, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.490802] env[62974]: INFO nova.compute.manager [-] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Took 1.26 seconds to deallocate network for instance. [ 991.611339] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Successfully created port: dc3ff6b9-4b12-45cf-b797-2d0daee5530a {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.711723] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5263e462-ce52-e77c-7e23-7aeaada8fba1, 'name': SearchDatastore_Task, 'duration_secs': 0.01075} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.712525] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11db243a-caaa-49e8-81cc-bfc2bad8039e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.718404] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 991.718404] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269551c-ae99-303d-ee1a-ca73670eb55b" [ 991.718404] env[62974]: _type = "Task" [ 991.718404] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.726493] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269551c-ae99-303d-ee1a-ca73670eb55b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.898348] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-18489c02-5958-431f-aede-f554d0d785ed-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.898720] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-18489c02-5958-431f-aede-f554d0d785ed-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.899118] env[62974]: DEBUG nova.objects.instance [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'flavor' on Instance uuid 18489c02-5958-431f-aede-f554d0d785ed {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.980908] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.983843] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.065s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.985021] env[62974]: INFO nova.compute.claims [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.987480] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23822617-674e-44f3-a68b-551267dd94af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.995974] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.999186] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654855, 'name': Rename_Task, 'duration_secs': 0.194807} completed 
successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.000281] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.000585] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e366e373-176e-4eb6-8c11-15984cd1b2fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.004600] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2717d34-932d-48b5-baf7-84109f4641b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.015361] env[62974]: INFO nova.scheduler.client.report [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Deleted allocations for instance 0f19241f-1650-41e5-8fe8-828024bf6aaa [ 992.022943] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 992.022943] env[62974]: value = "task-2654856" [ 992.022943] env[62974]: _type = "Task" [ 992.022943] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.037555] env[62974]: DEBUG nova.compute.manager [req-5bb8b31a-a560-4c88-9f1d-25266430eb2b req-d92a0a6f-f1ba-48b8-86a1-95f100bd0140 service nova] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Detach interface failed, port_id=5ef50dc0-edb6-41e4-b27b-22e996c326b4, reason: Instance c1d0b90c-aa1c-485d-850d-a1495feac7c9 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 992.040826] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654856, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.088969] env[62974]: DEBUG nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 992.117542] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 992.117932] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.118206] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 992.118519] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.118747] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 992.118981] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 992.119300] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 992.119577] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 992.119827] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 
tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 992.120070] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 992.120320] env[62974]: DEBUG nova.virt.hardware [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 992.121534] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9720cec3-578f-4e78-a586-c1f842ec0c65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.133475] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8d466e-8d90-4ca4-857f-4e6210a041dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.138496] env[62974]: DEBUG nova.compute.manager [req-b3747004-4d2b-450a-868b-cffd6764ab36 req-80966faf-8409-44ad-985d-aadc6b8eb14d service nova] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Received event network-vif-deleted-0576c111-5b07-4ceb-be4b-78e565bd0313 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 992.228771] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5269551c-ae99-303d-ee1a-ca73670eb55b, 'name': SearchDatastore_Task, 'duration_secs': 0.010619} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.229462] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.229740] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.230017] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b562d744-5e20-4149-b504-4efdc65817e0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.237381] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 992.237381] env[62974]: value = "task-2654857" [ 992.237381] env[62974]: _type = "Task" [ 992.237381] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.246216] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.479016] env[62974]: DEBUG nova.objects.instance [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'pci_requests' on Instance uuid 18489c02-5958-431f-aede-f554d0d785ed {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.523685] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d0bbcf49-7c98-476b-8d08-f5b80ec78fcc tempest-ImagesOneServerNegativeTestJSON-348199828 tempest-ImagesOneServerNegativeTestJSON-348199828-project-member] Lock "0f19241f-1650-41e5-8fe8-828024bf6aaa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.161s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.535478] env[62974]: DEBUG oslo_vmware.api [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654856, 'name': PowerOnVM_Task, 'duration_secs': 0.504629} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.535760] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 992.535969] env[62974]: INFO nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Took 6.91 seconds to spawn the instance on the hypervisor. [ 992.536203] env[62974]: DEBUG nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.537197] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e7cd83-9d3a-47ca-946b-121db717659e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.748308] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449989} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.748761] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.748860] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.749069] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1ba077ab-4d49-49bf-86ee-d9256fe93b5b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.756099] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 992.756099] env[62974]: value = "task-2654858" [ 992.756099] env[62974]: _type = "Task" [ 992.756099] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.764251] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654858, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.981852] env[62974]: DEBUG nova.objects.base [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Object Instance<18489c02-5958-431f-aede-f554d0d785ed> lazy-loaded attributes: flavor,pci_requests {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 992.982176] env[62974]: DEBUG nova.network.neutron [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 993.041969] env[62974]: DEBUG nova.policy [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 993.062609] env[62974]: INFO nova.compute.manager [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Took 29.21 seconds to build instance. [ 993.132069] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Successfully updated port: 40646b9d-b80e-40c3-9130-dcb5916cb108 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.255692] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e516ade-0bac-4728-a6da-00ecec21e980 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.266776] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073633} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.268606] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.269375] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7d8440-be20-4c2a-9843-497a275a9b4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.272378] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69eb1cea-b100-4eb3-a929-623a4e3eae70 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.296954] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.325036] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a4dc200-ddff-4475-b8eb-8dafdd625afe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.341184] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d268db80-eb57-4188-bb9f-879596bbc0b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.351352] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982d3335-c1d8-4208-a0ed-90afee04fa73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.355723] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 993.355723] env[62974]: value = "task-2654859" [ 993.355723] env[62974]: _type = "Task" [ 993.355723] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.356532] env[62974]: DEBUG nova.network.neutron [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Successfully created port: 8c509c6a-a36e-4112-997d-b730dd15b165 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.369492] env[62974]: DEBUG nova.compute.provider_tree [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.378456] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654859, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.564969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7f1724f2-1657-4106-8c2f-b1af40e5650c tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.728s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.871010] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654859, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.872958] env[62974]: DEBUG nova.scheduler.client.report [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.277148] env[62974]: DEBUG nova.compute.manager [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-vif-plugged-40646b9d-b80e-40c3-9130-dcb5916cb108 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.277423] env[62974]: DEBUG oslo_concurrency.lockutils [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] Acquiring lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.277767] env[62974]: DEBUG oslo_concurrency.lockutils [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.277998] env[62974]: DEBUG oslo_concurrency.lockutils [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.280970] env[62974]: DEBUG nova.compute.manager [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] No waiting events found dispatching network-vif-plugged-40646b9d-b80e-40c3-9130-dcb5916cb108 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 994.281189] env[62974]: WARNING nova.compute.manager [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received unexpected event network-vif-plugged-40646b9d-b80e-40c3-9130-dcb5916cb108 for instance with vm_state building and task_state spawning. 
[ 994.281352] env[62974]: DEBUG nova.compute.manager [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-changed-40646b9d-b80e-40c3-9130-dcb5916cb108 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.281506] env[62974]: DEBUG nova.compute.manager [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Refreshing instance network info cache due to event network-changed-40646b9d-b80e-40c3-9130-dcb5916cb108. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 994.281702] env[62974]: DEBUG oslo_concurrency.lockutils [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] Acquiring lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.281938] env[62974]: DEBUG oslo_concurrency.lockutils [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] Acquired lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.282112] env[62974]: DEBUG nova.network.neutron [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Refreshing network info cache for port 40646b9d-b80e-40c3-9130-dcb5916cb108 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.370393] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654859, 'name': ReconfigVM_Task, 'duration_secs': 0.942652} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.370729] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Reconfigured VM instance instance-0000005e to attach disk [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.373022] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dad8e0da-0706-417a-817d-7aa145ed9ced {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.379515] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.380023] env[62974]: DEBUG nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 994.382744] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 994.382744] env[62974]: value = "task-2654860" [ 994.382744] env[62974]: _type = "Task" [ 994.382744] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.383212] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 15.506s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.394114] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654860, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.822101] env[62974]: DEBUG nova.network.neutron [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Successfully updated port: 8c509c6a-a36e-4112-997d-b730dd15b165 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 994.855976] env[62974]: DEBUG nova.network.neutron [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.885285] env[62974]: DEBUG nova.compute.utils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 994.886759] env[62974]: DEBUG nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 994.886923] env[62974]: DEBUG nova.network.neutron [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.906219] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654860, 'name': Rename_Task, 'duration_secs': 0.159133} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.906219] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.906219] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9dcfb58-1cb9-4fbc-8c7f-dd005fa0df36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.915019] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 994.915019] env[62974]: value = "task-2654861" [ 994.915019] env[62974]: _type = "Task" [ 994.915019] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.929233] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654861, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.041787] env[62974]: DEBUG nova.compute.manager [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Received event network-changed-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 995.042975] env[62974]: DEBUG nova.compute.manager [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Refreshing instance network info cache due to event network-changed-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 995.043936] env[62974]: DEBUG oslo_concurrency.lockutils [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] Acquiring lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.045101] env[62974]: DEBUG oslo_concurrency.lockutils [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] Acquired lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.045101] env[62974]: DEBUG nova.network.neutron [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Refreshing network info cache for port ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.151519] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa31c463-bde3-4ece-82d5-46049ea60f31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.164495] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9123de1-29c8-4e31-aa11-0ff206781885 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.202675] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce8e68a-a805-4512-a805-81c23e12cfda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.211672] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b762c4b8-6da1-4cc9-bf91-d62c2f44de0c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.228365] env[62974]: DEBUG nova.compute.provider_tree [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in 
ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.231255] env[62974]: DEBUG nova.policy [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f1bb5be437e42e38b80df35193d784a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28bc1945aba64a2ea67745b0d417b9ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 995.265036] env[62974]: DEBUG nova.network.neutron [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.328117] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.328393] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.328931] env[62974]: DEBUG nova.network.neutron [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.390498] env[62974]: DEBUG nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 995.425444] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654861, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.603471] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Successfully updated port: f00dab5c-4be5-45af-a966-24a2317d5c0c {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.734634] env[62974]: DEBUG nova.scheduler.client.report [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.741609] env[62974]: DEBUG nova.network.neutron [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Successfully created port: c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.769696] env[62974]: DEBUG oslo_concurrency.lockutils [req-f8f66a2a-e65b-4da6-937f-9ec0f06b5873 req-f7d18bb0-d22f-47c0-8bae-50ec5057c39f service nova] Releasing lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.888373] env[62974]: WARNING nova.network.neutron [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] ad86c457-3431-4c60-bde9-ddba2b588dde already exists in list: networks containing: ['ad86c457-3431-4c60-bde9-ddba2b588dde']. ignoring it [ 995.924906] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654861, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.949120] env[62974]: DEBUG nova.network.neutron [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Updated VIF entry in instance network info cache for port ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.949290] env[62974]: DEBUG nova.network.neutron [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Updating instance_info_cache with network_info: [{"id": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "address": "fa:16:3e:7a:c7:fe", "network": {"id": "c62dd5c1-903c-40ff-88c8-358ffff86c68", "bridge": "br-int", "label": "tempest-ServersTestJSON-1709706442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e215ba9f69f44945b300d9750e0f34aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad0d2fde-02", "ovs_interfaceid": "ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.310226] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received event network-vif-plugged-8c509c6a-a36e-4112-997d-b730dd15b165 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.310475] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.310655] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Lock "18489c02-5958-431f-aede-f554d0d785ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.310825] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Lock "18489c02-5958-431f-aede-f554d0d785ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.310990] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] No waiting events found dispatching network-vif-plugged-8c509c6a-a36e-4112-997d-b730dd15b165 
{{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 996.311234] env[62974]: WARNING nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received unexpected event network-vif-plugged-8c509c6a-a36e-4112-997d-b730dd15b165 for instance with vm_state active and task_state None. [ 996.311406] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received event network-changed-8c509c6a-a36e-4112-997d-b730dd15b165 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.311568] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Refreshing instance network info cache due to event network-changed-8c509c6a-a36e-4112-997d-b730dd15b165. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 996.311706] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Acquiring lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.403411] env[62974]: DEBUG nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 996.425905] env[62974]: DEBUG oslo_vmware.api [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654861, 'name': PowerOnVM_Task, 'duration_secs': 1.338198} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.427929] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.428061] env[62974]: INFO nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Took 8.47 seconds to spawn the instance on the hypervisor. 
[ 996.428244] env[62974]: DEBUG nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.429439] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe395397-2e78-4ba4-8f64-9cea208870db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.434856] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 996.435092] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.435250] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 996.435432] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.435574] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 996.435717] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 996.435922] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 996.436092] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 996.436260] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 996.436420] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 996.436590] env[62974]: DEBUG nova.virt.hardware [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 996.437425] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d6a3cc-fd38-4939-910e-0d8c0a07ebd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.449615] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9074c0be-15d3-472f-aadf-e695876d9807 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.453745] env[62974]: DEBUG oslo_concurrency.lockutils [req-c256b6c1-fa9b-42c5-90bf-95a8a20bb74f req-0710154c-f97d-4efa-9235-28a959b230cf service nova] Releasing lock "refresh_cache-7163e48f-8344-4837-bbfd-cbb5741eee5d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.472344] env[62974]: DEBUG nova.network.neutron [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8c509c6a-a36e-4112-997d-b730dd15b165", "address": "fa:16:3e:41:86:84", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c509c6a-a3", "ovs_interfaceid": "8c509c6a-a36e-4112-997d-b730dd15b165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.751206] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.367s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.751206] env[62974]: DEBUG nova.compute.manager [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62974) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 996.754038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.293s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.754357] env[62974]: DEBUG nova.objects.instance [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lazy-loading 'resources' on Instance uuid c79afcfb-25ce-4130-96d5-5148d968e5bd {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.943335] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 996.943589] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 996.957630] env[62974]: INFO nova.compute.manager [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Took 25.86 seconds to build instance. [ 996.975149] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.975930] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.976046] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.976258] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Acquired lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.976431] env[62974]: DEBUG nova.network.neutron [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Refreshing network info cache for port 8c509c6a-a36e-4112-997d-b730dd15b165 {{(pid=62974) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 996.978537] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c53d5c5-0f5b-4e0e-9dc2-8a9db2c2e0ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.997993] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 996.998234] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.998391] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 996.998574] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.998719] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 996.998865] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 996.999650] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 996.999650] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
996.999650] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 996.999650] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 997.000087] env[62974]: DEBUG nova.virt.hardware [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 997.008174] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Reconfiguring VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 997.008749] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caccb420-2c30-434c-ab21-2eef7785c30a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.029201] env[62974]: DEBUG oslo_vmware.api [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 997.029201] env[62974]: value = "task-2654862" [ 997.029201] env[62974]: _type = "Task" [ 997.029201] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.039144] env[62974]: DEBUG oslo_vmware.api [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654862, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.335305] env[62974]: INFO nova.scheduler.client.report [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted allocation for migration f6781122-3622-4249-8545-448431a998f3 [ 997.463024] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.463024] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 997.463024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-98c8e21c-7d34-40c2-a1d3-a997b1e7f6a4 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "dca952df-dac9-4502-948b-24ac6fb939f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.373s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.495455] env[62974]: DEBUG nova.compute.manager [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Received event network-changed-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 997.495669] env[62974]: DEBUG nova.compute.manager [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Refreshing instance network info cache due to event network-changed-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 997.495882] env[62974]: DEBUG oslo_concurrency.lockutils [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] Acquiring lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.496040] env[62974]: DEBUG oslo_concurrency.lockutils [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] Acquired lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.496207] env[62974]: DEBUG nova.network.neutron [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Refreshing network info cache for port a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 997.516577] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99eceea-098f-4f1e-a679-4c50acb75522 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.525673] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e9bb73-a65d-4aec-b269-96e7168c4353 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.562105] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9c4c52-07c1-4fa7-962e-ecde3683752f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.570269] env[62974]: DEBUG oslo_vmware.api [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654862, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.573115] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca3160b-a401-4d8f-9315-9b22f708f9d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.586729] env[62974]: DEBUG nova.compute.provider_tree [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.725182] env[62974]: DEBUG nova.network.neutron [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Successfully updated port: c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.844649] env[62974]: DEBUG oslo_concurrency.lockutils [None req-691fb133-1985-4083-81e8-6c78c518b3a0 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 22.489s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.863333] env[62974]: DEBUG nova.network.neutron [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updated VIF entry in instance network info cache for port 8c509c6a-a36e-4112-997d-b730dd15b165. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.863774] env[62974]: DEBUG nova.network.neutron [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8c509c6a-a36e-4112-997d-b730dd15b165", "address": "fa:16:3e:41:86:84", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c509c6a-a3", "ovs_interfaceid": "8c509c6a-a36e-4112-997d-b730dd15b165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.970186] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Successfully updated port: dc3ff6b9-4b12-45cf-b797-2d0daee5530a {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 998.045866] env[62974]: DEBUG oslo_vmware.api [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654862, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.092031] env[62974]: DEBUG nova.scheduler.client.report [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.179011] env[62974]: DEBUG nova.objects.instance [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'flavor' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.227155] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.227381] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.227575] env[62974]: DEBUG nova.network.neutron [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.232301] env[62974]: DEBUG nova.network.neutron [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Updated VIF entry in instance network info cache for port a6e1899a-69c5-486d-bfb2-a2f12c06e8ac. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 998.232637] env[62974]: DEBUG nova.network.neutron [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Updating instance_info_cache with network_info: [{"id": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "address": "fa:16:3e:61:55:4e", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e1899a-69", "ovs_interfaceid": "a6e1899a-69c5-486d-bfb2-a2f12c06e8ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.363496] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-vif-plugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.363938] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.363938] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.364277] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.364378] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] No waiting events found dispatching network-vif-plugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a 
{{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 998.364979] env[62974]: WARNING nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received unexpected event network-vif-plugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a for instance with vm_state building and task_state spawning. [ 998.364979] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.364979] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing instance network info cache due to event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 998.365296] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Acquiring lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.366073] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Releasing lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.368262] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-vif-plugged-f00dab5c-4be5-45af-a966-24a2317d5c0c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.368262] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Acquiring lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.368262] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.368262] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.368262] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] 
[instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] No waiting events found dispatching network-vif-plugged-f00dab5c-4be5-45af-a966-24a2317d5c0c {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 998.368262] env[62974]: WARNING nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received unexpected event network-vif-plugged-f00dab5c-4be5-45af-a966-24a2317d5c0c for instance with vm_state building and task_state spawning. [ 998.368262] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-changed-f00dab5c-4be5-45af-a966-24a2317d5c0c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.368262] env[62974]: DEBUG nova.compute.manager [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Refreshing instance network info cache due to event network-changed-f00dab5c-4be5-45af-a966-24a2317d5c0c. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 998.368262] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Acquiring lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.368262] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Acquired lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.368262] env[62974]: DEBUG nova.network.neutron [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Refreshing network info cache for port f00dab5c-4be5-45af-a966-24a2317d5c0c {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 998.463290] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Didn't find any instances for network info cache update. 
{{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 998.467021] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.467021] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.467021] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.467021] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.467021] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.467021] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.467021] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 998.467021] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.475931] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.546789] env[62974]: DEBUG oslo_vmware.api [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654862, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.597924] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.601635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.822s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.603259] env[62974]: INFO nova.compute.claims [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 998.626426] env[62974]: INFO nova.scheduler.client.report [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Deleted allocations for instance c79afcfb-25ce-4130-96d5-5148d968e5bd [ 998.683894] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.684095] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.684271] env[62974]: DEBUG nova.network.neutron [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.684503] env[62974]: DEBUG nova.objects.instance [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'info_cache' on Instance uuid 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.738377] env[62974]: DEBUG oslo_concurrency.lockutils [req-a3bf6a36-17bb-48c0-8d1f-1c1358a5d6ec req-a7327ffb-bfc9-4fb3-a649-899c0543b7e4 service nova] Releasing lock "refresh_cache-dca952df-dac9-4502-948b-24ac6fb939f9" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.764434] env[62974]: DEBUG nova.network.neutron [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 
tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.918158] env[62974]: DEBUG nova.network.neutron [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.935451] env[62974]: DEBUG nova.network.neutron [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc699c4dc-40", "ovs_interfaceid": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.968871] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.006414] env[62974]: DEBUG nova.network.neutron [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.044862] env[62974]: DEBUG oslo_vmware.api [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654862, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.135336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e44e737a-d9b7-47a0-a827-01aa02e5235d tempest-ServerDiskConfigTestJSON-265967801 tempest-ServerDiskConfigTestJSON-265967801-project-member] Lock "c79afcfb-25ce-4130-96d5-5148d968e5bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.458s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.187777] env[62974]: DEBUG nova.objects.base [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Object Instance<55229db9-9442-4973-a1f2-7762227167a4> lazy-loaded attributes: flavor,info_cache {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 999.438641] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.438979] env[62974]: DEBUG nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Instance network_info: |[{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc699c4dc-40", "ovs_interfaceid": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 999.439306] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Acquired lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.439570] env[62974]: DEBUG nova.network.neutron [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} 
[ 999.441124] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:c2:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c699c4dc-40cf-4eaa-9ba6-5e255a43e01a', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.449776] env[62974]: DEBUG oslo.service.loopingcall [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.449776] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.450987] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39551729-2b23-4169-9e1d-cd3851f0be8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.470406] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.470406] env[62974]: value = "task-2654863" [ 999.470406] env[62974]: _type = "Task" [ 999.470406] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.479582] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654863, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.509743] env[62974]: DEBUG oslo_concurrency.lockutils [req-550c888d-eb6f-4534-960c-b0dcebf51d81 req-06f3629f-a66b-4d87-86a9-3d0442b7ad4d service nova] Releasing lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.510202] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.510370] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.549360] env[62974]: DEBUG oslo_vmware.api [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654862, 'name': ReconfigVM_Task, 'duration_secs': 2.08894} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.549688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.550011] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Reconfigured VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 999.838738] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d7b1d8-fff2-424c-b376-d8370be5b307 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.851031] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a9201a-c2d4-46f5-82e9-b23f1432e281 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.889731] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64393f6c-320f-4301-8433-d5a82aaf182c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.897201] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca9eaf9-62db-4304-abd7-b24f947d97a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.910916] env[62974]: DEBUG nova.compute.provider_tree [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.984581] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654863, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.056598] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7d741301-6c15-4616-8128-c32c7e1d1947 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-18489c02-5958-431f-aede-f554d0d785ed-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.157s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.115024] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1000.306115] env[62974]: DEBUG nova.network.neutron [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [{"id": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "address": "fa:16:3e:6f:f3:98", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1e40ea-8a", "ovs_interfaceid": "8c1e40ea-8afa-424a-9c2d-65f7e1179366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.414453] env[62974]: DEBUG nova.scheduler.client.report [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.483766] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654863, 'name': CreateVM_Task, 'duration_secs': 0.665591} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.484072] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.485248] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.485248] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.485248] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1000.485448] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05536738-e7b0-49aa-94c5-26d02c1792f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.491096] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1000.491096] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d1a25-67b7-cd50-db3e-f5b23fe42289" [ 1000.491096] env[62974]: _type = "Task" [ 1000.491096] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.500126] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d1a25-67b7-cd50-db3e-f5b23fe42289, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.553522] env[62974]: DEBUG nova.network.neutron [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updated VIF entry in instance network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.553877] env[62974]: DEBUG nova.network.neutron [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc699c4dc-40", "ovs_interfaceid": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.807425] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-55229db9-9442-4973-a1f2-7762227167a4" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.915191] env[62974]: DEBUG nova.network.neutron [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Updating instance_info_cache with network_info: [{"id": "40646b9d-b80e-40c3-9130-dcb5916cb108", "address": "fa:16:3e:64:47:f2", "network": {"id": "35fdac1d-a5e0-45cc-ac59-55586e432c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-722510883", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40646b9d-b8", "ovs_interfaceid": "40646b9d-b80e-40c3-9130-dcb5916cb108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f00dab5c-4be5-45af-a966-24a2317d5c0c", "address": "fa:16:3e:3e:af:58", "network": 
{"id": "b521d95a-4aef-460d-a85a-776282a5f972", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-134786512", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00dab5c-4b", "ovs_interfaceid": "f00dab5c-4be5-45af-a966-24a2317d5c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dc3ff6b9-4b12-45cf-b797-2d0daee5530a", "address": "fa:16:3e:ce:67:9b", "network": {"id": "35fdac1d-a5e0-45cc-ac59-55586e432c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-722510883", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3ff6b9-4b", "ovs_interfaceid": "dc3ff6b9-4b12-45cf-b797-2d0daee5530a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.920244] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.920922] env[62974]: DEBUG nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1000.923544] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.148s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.924904] env[62974]: INFO nova.compute.claims [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.004985] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d1a25-67b7-cd50-db3e-f5b23fe42289, 'name': SearchDatastore_Task, 'duration_secs': 0.01151} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.005640] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.005896] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.006212] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.006371] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.006653] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.006931] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d543318-b46a-46c4-a1c0-ad8a3c0efdd2 {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.016211] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.016381] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.017123] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d8ad509-8b28-41ac-818f-bc3f97c892b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.027542] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1001.027542] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527c0a88-fdfb-5d3c-1408-6b060fc9019a" [ 1001.027542] env[62974]: _type = "Task" [ 1001.027542] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.035690] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527c0a88-fdfb-5d3c-1408-6b060fc9019a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.056367] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Releasing lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.056654] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-vif-plugged-dc3ff6b9-4b12-45cf-b797-2d0daee5530a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1001.056851] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Acquiring lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.057071] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.057238] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.057403] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] No waiting events found dispatching network-vif-plugged-dc3ff6b9-4b12-45cf-b797-2d0daee5530a {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1001.057616] env[62974]: WARNING nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received unexpected event network-vif-plugged-dc3ff6b9-4b12-45cf-b797-2d0daee5530a for instance with vm_state building and task_state spawning. [ 1001.057794] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-changed-dc3ff6b9-4b12-45cf-b797-2d0daee5530a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1001.057949] env[62974]: DEBUG nova.compute.manager [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Refreshing instance network info cache due to event network-changed-dc3ff6b9-4b12-45cf-b797-2d0daee5530a. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1001.058131] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Acquiring lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.417111] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Releasing lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.417767] env[62974]: DEBUG nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Instance network_info: |[{"id": "40646b9d-b80e-40c3-9130-dcb5916cb108", "address": "fa:16:3e:64:47:f2", "network": {"id": "35fdac1d-a5e0-45cc-ac59-55586e432c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-722510883", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40646b9d-b8", "ovs_interfaceid": "40646b9d-b80e-40c3-9130-dcb5916cb108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f00dab5c-4be5-45af-a966-24a2317d5c0c", "address": "fa:16:3e:3e:af:58", "network": {"id": "b521d95a-4aef-460d-a85a-776282a5f972", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-134786512", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00dab5c-4b", "ovs_interfaceid": "f00dab5c-4be5-45af-a966-24a2317d5c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dc3ff6b9-4b12-45cf-b797-2d0daee5530a", "address": "fa:16:3e:ce:67:9b", "network": {"id": "35fdac1d-a5e0-45cc-ac59-55586e432c42", "bridge": "br-int", 
"label": "tempest-ServersTestMultiNic-722510883", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3ff6b9-4b", "ovs_interfaceid": "dc3ff6b9-4b12-45cf-b797-2d0daee5530a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1001.418148] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Acquired lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.418340] env[62974]: DEBUG nova.network.neutron [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Refreshing network info cache for port dc3ff6b9-4b12-45cf-b797-2d0daee5530a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.423621] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:47:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40646b9d-b80e-40c3-9130-dcb5916cb108', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:af:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f00dab5c-4be5-45af-a966-24a2317d5c0c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:67:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc3ff6b9-4b12-45cf-b797-2d0daee5530a', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.440713] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Creating folder: Project (d6453d2c53e34f6da5e0bf34d846e663). Parent ref: group-v535199. 
{{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.445229] env[62974]: DEBUG nova.compute.utils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1001.448342] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-710bbe36-488f-47ed-8de1-f23d1d48bc08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.451296] env[62974]: DEBUG nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1001.451464] env[62974]: DEBUG nova.network.neutron [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1001.465103] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Created folder: Project (d6453d2c53e34f6da5e0bf34d846e663) in parent group-v535199. [ 1001.465103] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Creating folder: Instances. Parent ref: group-v535464. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.465103] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8aa0fec6-ae96-4f23-b168-c78100004aff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.472351] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Created folder: Instances in parent group-v535464. [ 1001.472578] env[62974]: DEBUG oslo.service.loopingcall [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.472756] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.472950] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9e55e61-69a7-4d12-b09e-b37de9f5b1a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.501969] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.501969] env[62974]: value = "task-2654866" [ 1001.501969] env[62974]: _type = "Task" [ 1001.501969] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.509628] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654866, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.533912] env[62974]: DEBUG nova.policy [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49d8e3a243d346e8969ba6f325e7787e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9087d01b1ad748e0a66474953dfe7034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1001.541055] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527c0a88-fdfb-5d3c-1408-6b060fc9019a, 'name': SearchDatastore_Task, 'duration_secs': 0.009043} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.541867] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-653a7fa0-0966-49b0-88b0-3faae097e047 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.547127] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1001.547127] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bfb0ab-65a4-3a2b-9cde-c31ee1492ddf" [ 1001.547127] env[62974]: _type = "Task" [ 1001.547127] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.555450] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bfb0ab-65a4-3a2b-9cde-c31ee1492ddf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.814334] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.814688] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e2cf746-5ccd-4251-8b49-fc35b8a612de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.821176] env[62974]: DEBUG nova.network.neutron [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Updated VIF entry in instance network info cache for port dc3ff6b9-4b12-45cf-b797-2d0daee5530a. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.821874] env[62974]: DEBUG nova.network.neutron [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Updating instance_info_cache with network_info: [{"id": "40646b9d-b80e-40c3-9130-dcb5916cb108", "address": "fa:16:3e:64:47:f2", "network": {"id": "35fdac1d-a5e0-45cc-ac59-55586e432c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-722510883", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40646b9d-b8", "ovs_interfaceid": "40646b9d-b80e-40c3-9130-dcb5916cb108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f00dab5c-4be5-45af-a966-24a2317d5c0c", "address": "fa:16:3e:3e:af:58", "network": {"id": "b521d95a-4aef-460d-a85a-776282a5f972", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-134786512", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00dab5c-4b", "ovs_interfaceid": "f00dab5c-4be5-45af-a966-24a2317d5c0c", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dc3ff6b9-4b12-45cf-b797-2d0daee5530a", "address": "fa:16:3e:ce:67:9b", "network": {"id": "35fdac1d-a5e0-45cc-ac59-55586e432c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-722510883", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3ff6b9-4b", "ovs_interfaceid": "dc3ff6b9-4b12-45cf-b797-2d0daee5530a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.828541] env[62974]: DEBUG oslo_vmware.api [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1001.828541] env[62974]: value = "task-2654867" [ 1001.828541] env[62974]: _type = "Task" [ 1001.828541] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.839332] env[62974]: DEBUG oslo_vmware.api [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654867, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.945274] env[62974]: DEBUG nova.network.neutron [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Successfully created port: 6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1001.951820] env[62974]: DEBUG nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1002.013027] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654866, 'name': CreateVM_Task, 'duration_secs': 0.484815} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.017093] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.018583] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.018656] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.019027] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1002.019526] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6a65805-04f8-4dff-b6df-809390b0b50f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.025358] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1002.025358] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]522d9ad4-17f1-b2d5-1d68-ecbececc758d" [ 1002.025358] env[62974]: _type = "Task" [ 1002.025358] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.043026] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522d9ad4-17f1-b2d5-1d68-ecbececc758d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.058493] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bfb0ab-65a4-3a2b-9cde-c31ee1492ddf, 'name': SearchDatastore_Task, 'duration_secs': 0.010521} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.058774] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.059097] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1002.059511] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3406953-10ca-41e0-acb4-a283bfbfd780 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.066396] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1002.066396] env[62974]: value = "task-2654868" [ 1002.066396] env[62974]: _type = "Task" [ 1002.066396] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.080918] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654868, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.203651] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6b04a6-8cd9-4e97-ae1c-cad90230b218 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.214428] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05862cf8-3ea1-43dd-b13e-c4490e4a4652 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.248995] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a870ce5c-66f0-418d-8dee-20780f019679 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.258799] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b87437-7920-4575-9fc1-770d789832c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.276400] env[62974]: DEBUG nova.compute.provider_tree [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.324712] env[62974]: DEBUG oslo_concurrency.lockutils [req-df1f26c8-fd38-40ab-ad78-bf008746a3a7 req-d5095309-da11-4f90-8ec4-73547401db9c service nova] Releasing lock "refresh_cache-7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.339380] env[62974]: DEBUG oslo_vmware.api [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654867, 'name': PowerOnVM_Task, 'duration_secs': 0.401754} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.339802] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.340046] env[62974]: DEBUG nova.compute.manager [None req-73edaa53-f10a-47fb-9913-7d5ab5f7783f tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.341063] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ec2320-9288-454d-a842-b5d425a1c580 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.387605] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.387873] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.536765] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]522d9ad4-17f1-b2d5-1d68-ecbececc758d, 'name': SearchDatastore_Task, 'duration_secs': 0.021917} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.537075] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.537309] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.537543] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.537688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.537899] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.538183] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ca5f947-83ff-4c6e-880e-6a45ac1ae63d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.546772] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.546959] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.547718] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bc22f43-7c64-4861-8981-555f23e07cda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.552867] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1002.552867] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52de7fbf-0dfc-ffcf-2dfa-e5ad28f49384" [ 1002.552867] env[62974]: _type = "Task" [ 1002.552867] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.562053] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52de7fbf-0dfc-ffcf-2dfa-e5ad28f49384, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.574995] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654868, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.643469] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-18489c02-5958-431f-aede-f554d0d785ed-8c509c6a-a36e-4112-997d-b730dd15b165" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.643749] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-18489c02-5958-431f-aede-f554d0d785ed-8c509c6a-a36e-4112-997d-b730dd15b165" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.782060] env[62974]: DEBUG nova.scheduler.client.report [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.890893] env[62974]: DEBUG nova.compute.manager [None 
req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1002.971901] env[62974]: DEBUG nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1003.007445] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1003.007758] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1003.007919] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1003.008113] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1003.008264] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1003.008410] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1003.008614] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 
tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1003.008773] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1003.008935] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1003.009230] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1003.009426] env[62974]: DEBUG nova.virt.hardware [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1003.010416] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67400615-472a-4b38-9d44-0760a38070a8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.019095] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79a19ee-af8c-41ac-9800-af9ccb7e0576 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.067039] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52de7fbf-0dfc-ffcf-2dfa-e5ad28f49384, 'name': SearchDatastore_Task, 'duration_secs': 0.021714} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.067039] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9c415e0-765b-4a5c-87e1-c34e39887a12 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.075685] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1003.075685] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523eacb6-88cb-70db-2c59-bfce02f85c2f" [ 1003.075685] env[62974]: _type = "Task" [ 1003.075685] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.080135] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654868, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.093248] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523eacb6-88cb-70db-2c59-bfce02f85c2f, 'name': SearchDatastore_Task, 'duration_secs': 0.010496} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.093539] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.093822] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f/7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1003.094408] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-033edc43-6085-4de8-b6fb-72578d1e7fac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.103894] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1003.103894] env[62974]: value = "task-2654869" [ 1003.103894] env[62974]: _type = "Task" [ 1003.103894] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.856959] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.857374] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.857839] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "55229db9-9442-4973-a1f2-7762227167a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.858057] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.858252] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "55229db9-9442-4973-a1f2-7762227167a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.858424] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.858597] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.861351] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.938s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.861818] env[62974]: DEBUG nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1003.868078] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654869, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.868078] env[62974]: WARNING oslo_vmware.common.loopingcall [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] task run outlasted interval by 0.26296699999999995 sec [ 1003.868674] env[62974]: INFO nova.compute.manager [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Terminating instance [ 1003.870393] env[62974]: DEBUG nova.network.neutron [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Successfully updated port: 6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.871818] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f825151-62db-4f27-a294-a6c75c6f34dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.875071] env[62974]: DEBUG nova.compute.manager [req-6fd91e99-f6ce-42b7-9ed9-d92b361c0bb0 req-681fb86e-b723-43c4-a31b-1cb56c0f8bf2 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Received event network-vif-plugged-6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.875217] env[62974]: DEBUG oslo_concurrency.lockutils [req-6fd91e99-f6ce-42b7-9ed9-d92b361c0bb0 req-681fb86e-b723-43c4-a31b-1cb56c0f8bf2 service nova] Acquiring lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.875460] env[62974]: DEBUG oslo_concurrency.lockutils [req-6fd91e99-f6ce-42b7-9ed9-d92b361c0bb0 req-681fb86e-b723-43c4-a31b-1cb56c0f8bf2 service nova] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.875691] env[62974]: DEBUG oslo_concurrency.lockutils [req-6fd91e99-f6ce-42b7-9ed9-d92b361c0bb0 req-681fb86e-b723-43c4-a31b-1cb56c0f8bf2 service nova] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.875788] env[62974]: DEBUG nova.compute.manager [req-6fd91e99-f6ce-42b7-9ed9-d92b361c0bb0 req-681fb86e-b723-43c4-a31b-1cb56c0f8bf2 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] No waiting events found dispatching network-vif-plugged-6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1003.875945] env[62974]: WARNING nova.compute.manager [req-6fd91e99-f6ce-42b7-9ed9-d92b361c0bb0 req-681fb86e-b723-43c4-a31b-1cb56c0f8bf2 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Received unexpected event network-vif-plugged-6d33ceb1-e623-4a85-bed3-a9dba877fc7b for instance with vm_state building and task_state spawning. [ 1003.876246] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.130s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.876436] env[62974]: DEBUG nova.objects.instance [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'resources' on Instance uuid f586f9a6-1288-4aa2-9052-6e9eb74aac5f {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.894780] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654869, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719108} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.909990] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f/7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.909990] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.910127] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654868, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.911146] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.911383] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b300169-7f7f-42b3-aae5-92119823c51e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.913781] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42210cb4-1607-4216-b752-249de96dcc8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.921639] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1003.921639] env[62974]: value = "task-2654870" [ 1003.921639] env[62974]: _type = "Task" [ 1003.921639] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.945791] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Reconfiguring VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1003.949350] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f5c9d81-3cc4-48eb-8f00-eb2795248476 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.968313] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654870, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.969634] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1003.969634] env[62974]: value = "task-2654871" [ 1003.969634] env[62974]: _type = "Task" [ 1003.969634] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.978600] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.371170] env[62974]: DEBUG nova.compute.utils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1004.372748] env[62974]: DEBUG nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1004.372933] env[62974]: DEBUG nova.network.neutron [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1004.382848] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654868, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.387046] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.387207] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.387371] env[62974]: DEBUG nova.network.neutron [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1004.389813] env[62974]: DEBUG nova.compute.manager [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1004.389813] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1004.390833] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6db1920-093b-462b-ad81-c2c135fd3b69 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.399224] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.399515] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25eb73bb-16c8-4176-9385-151c8728f91c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.409729] env[62974]: DEBUG oslo_vmware.api [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1004.409729] env[62974]: value = "task-2654872" [ 1004.409729] env[62974]: _type = "Task" [ 1004.409729] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.415245] env[62974]: DEBUG nova.policy [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82cbd050443849dba65c7c3ccd578590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21909beb1faa4a2c994925764408480f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1004.422496] env[62974]: DEBUG oslo_vmware.api [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654872, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.449438] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654870, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168996} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.452345] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1004.453448] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9087e4c-a930-4725-bd6d-9c9c7bf9d703 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.481134] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f/7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1004.487561] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0c5fcee-be59-4258-8fe6-569e198ecd43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.510674] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.514020] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1004.514020] env[62974]: value = "task-2654873" [ 1004.514020] env[62974]: _type = "Task" [ 1004.514020] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.520797] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654873, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.639633] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ff1737-1b93-47d5-84ff-76b5c7cfc724 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.648770] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4473eb-1a97-4b19-ad61-7c56ca3ad5d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.686899] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a91cee4-2cc2-463d-82e5-6f48764a9430 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.694638] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd0de44-1236-4653-91be-c1d69d399e56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.709979] env[62974]: DEBUG nova.compute.provider_tree [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1004.724411] env[62974]: DEBUG nova.network.neutron [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Successfully created port: f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1004.876808] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654868, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.622973} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.877080] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1004.877297] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.877540] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2ad8515-3bc8-48c7-b665-89a3539f6432 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.879695] env[62974]: DEBUG nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1004.887383] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1004.887383] env[62974]: value = "task-2654874" [ 1004.887383] env[62974]: _type = "Task" [ 1004.887383] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.897134] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654874, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.919518] env[62974]: DEBUG oslo_vmware.api [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654872, 'name': PowerOffVM_Task, 'duration_secs': 0.302807} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.919847] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1004.920033] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1004.921030] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-565460c6-8217-44e8-a865-c2193b5734f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.982817] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.023800] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654873, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.025696] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.027929] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.027929] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleting the datastore file [datastore2] 55229db9-9442-4973-a1f2-7762227167a4 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.027929] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d8fa897-b95b-460e-b68f-6b415557e5b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.033387] env[62974]: DEBUG oslo_vmware.api [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1005.033387] env[62974]: value = "task-2654876" [ 1005.033387] env[62974]: _type = "Task" [ 1005.033387] 
env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.042096] env[62974]: DEBUG oslo_vmware.api [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654876, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.138058] env[62974]: DEBUG nova.network.neutron [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1005.235721] env[62974]: ERROR nova.scheduler.client.report [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [req-4db30a5c-270d-4168-a9a4-c85498ea329d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4db30a5c-270d-4168-a9a4-c85498ea329d"}]} [ 1005.253946] env[62974]: DEBUG nova.scheduler.client.report [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1005.274416] env[62974]: DEBUG nova.scheduler.client.report [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1005.274664] env[62974]: DEBUG nova.compute.provider_tree [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1005.287573] env[62974]: DEBUG nova.scheduler.client.report [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1005.309747] env[62974]: DEBUG nova.network.neutron [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Updating instance_info_cache with network_info: [{"id": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "address": "fa:16:3e:a6:a5:cd", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d33ceb1-e6", "ovs_interfaceid": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.311627] env[62974]: DEBUG nova.scheduler.client.report [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1005.397843] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654874, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067366} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.398498] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.399195] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2b9a2a-2c55-4f2e-b6a3-a84393910307 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.425218] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.428509] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2abe2e3f-44b9-4ab0-890a-b65270dc4588 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.449136] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1005.449136] env[62974]: value = "task-2654877" [ 1005.449136] env[62974]: _type = "Task" [ 1005.449136] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.451111] env[62974]: DEBUG nova.compute.manager [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Received event network-changed-6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1005.451272] env[62974]: DEBUG nova.compute.manager [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Refreshing instance network info cache due to event network-changed-6d33ceb1-e623-4a85-bed3-a9dba877fc7b. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1005.451459] env[62974]: DEBUG oslo_concurrency.lockutils [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] Acquiring lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.462877] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654877, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.483856] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.526505] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654873, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.542454] env[62974]: DEBUG oslo_vmware.api [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144627} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.543397] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1005.543627] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1005.543837] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1005.544052] env[62974]: INFO nova.compute.manager [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1005.544329] env[62974]: DEBUG oslo.service.loopingcall [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.545026] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8acb73-9d9e-47df-8de7-22e386c6c4f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.547392] env[62974]: DEBUG nova.compute.manager [-] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1005.547533] env[62974]: DEBUG nova.network.neutron [-] [instance: 55229db9-9442-4973-a1f2-7762227167a4] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1005.553104] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15eea67-cc2f-4e83-90f9-d2a4085c1e29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.584024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbd4f31-caa0-4fa5-9b97-9f082e37ca94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.590579] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e84a48-ad37-42f1-9340-37e297b106df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.606174] env[62974]: DEBUG nova.compute.provider_tree [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.815381] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.815794] env[62974]: DEBUG nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Instance network_info: |[{"id": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "address": "fa:16:3e:a6:a5:cd", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", 
"segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d33ceb1-e6", "ovs_interfaceid": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1005.816201] env[62974]: DEBUG oslo_concurrency.lockutils [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] Acquired lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.816438] env[62974]: DEBUG nova.network.neutron [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Refreshing network info cache for port 6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.819102] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:a5:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d33ceb1-e623-4a85-bed3-a9dba877fc7b', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1005.827693] env[62974]: DEBUG oslo.service.loopingcall [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.828725] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1005.828987] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-657a8bff-e255-4854-ae6f-56d9683b7f0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.851842] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1005.851842] env[62974]: value = "task-2654878" [ 1005.851842] env[62974]: _type = "Task" [ 1005.851842] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.861912] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654878, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.890389] env[62974]: DEBUG nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1005.921522] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=<?>,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-19T03:48:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1005.921827] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.922010] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1005.922299] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.922528] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1005.923033] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1005.923033] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1005.923224] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1005.923439] env[62974]: DEBUG nova.virt.hardware [None 
req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1005.923709] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1005.924026] env[62974]: DEBUG nova.virt.hardware [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1005.925039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d0f130-a122-4dc2-9e2b-5a6c01f48007 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.933903] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50da6a3a-41b4-405c-8303-dfffbb8a7d01 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.960962] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.982939] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.026538] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654873, 'name': ReconfigVM_Task, 'duration_secs': 1.351713} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.026851] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f/7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.027554] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-891b6a69-d88a-4345-aa6d-e75c995ace9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.034899] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1006.034899] env[62974]: value = "task-2654879" [ 1006.034899] env[62974]: _type = "Task" [ 1006.034899] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.043476] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654879, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.111397] env[62974]: DEBUG nova.scheduler.client.report [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.362911] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654878, 'name': CreateVM_Task, 'duration_secs': 0.371251} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.363060] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1006.363714] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.363977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.364201] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1006.364448] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-600bf9b6-df69-4aa6-a77c-a386ea5dc7e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.368811] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1006.368811] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524704f0-f760-5693-4ee3-de572a5f3814" [ 1006.368811] env[62974]: _type = "Task" [ 1006.368811] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.378974] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524704f0-f760-5693-4ee3-de572a5f3814, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.464509] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654877, 'name': ReconfigVM_Task, 'duration_secs': 0.574206} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.464628] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Reconfigured VM instance instance-00000060 to attach disk [datastore1] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.465150] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ca6cfdc-476a-41a3-8aa2-b2d7393d71b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.471864] env[62974]: DEBUG nova.network.neutron [-] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.473028] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1006.473028] env[62974]: value = "task-2654880" [ 1006.473028] env[62974]: _type = "Task" [ 1006.473028] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.485999] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654880, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.489323] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.545791] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654879, 'name': Rename_Task, 'duration_secs': 0.200968} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.546903] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.547208] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55959d41-33d9-4735-a8e7-4ff97cd747cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.553392] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1006.553392] env[62974]: value = "task-2654881" [ 1006.553392] env[62974]: _type = "Task" [ 1006.553392] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.561970] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654881, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.594733] env[62974]: DEBUG nova.network.neutron [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Successfully updated port: f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1006.617686] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.741s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.620925] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.192s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.621252] env[62974]: DEBUG nova.objects.instance [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'resources' on Instance uuid c1d0b90c-aa1c-485d-850d-a1495feac7c9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.637587] env[62974]: DEBUG nova.network.neutron [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Updated VIF entry in instance network info cache for port 6d33ceb1-e623-4a85-bed3-a9dba877fc7b. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1006.638049] env[62974]: DEBUG nova.network.neutron [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Updating instance_info_cache with network_info: [{"id": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "address": "fa:16:3e:a6:a5:cd", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d33ceb1-e6", "ovs_interfaceid": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.642243] env[62974]: INFO nova.scheduler.client.report [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted allocations for instance f586f9a6-1288-4aa2-9052-6e9eb74aac5f [ 1006.881821] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524704f0-f760-5693-4ee3-de572a5f3814, 'name': SearchDatastore_Task, 'duration_secs': 0.009573} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.882126] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.882392] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.882651] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.882874] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.883029] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1006.883312] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffcf5939-e9b9-4104-ad22-cddee4d1edb6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.894863] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1006.895194] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1006.895766] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce96c5e3-a70e-4939-8ef9-cd78a0dd0ecc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.900774] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1006.900774] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5262f5ae-6cb5-5aa2-eba5-85821d16bb5f" [ 1006.900774] env[62974]: _type = "Task" [ 1006.900774] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.908472] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5262f5ae-6cb5-5aa2-eba5-85821d16bb5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.977819] env[62974]: INFO nova.compute.manager [-] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Took 1.43 seconds to deallocate network for instance. [ 1006.988869] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654880, 'name': Rename_Task, 'duration_secs': 0.135374} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.989326] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.989629] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0655b471-bb77-4066-addb-3e0a93bce01c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.994040] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.000395] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1007.000395] env[62974]: value = "task-2654882" [ 1007.000395] env[62974]: _type = "Task" [ 1007.000395] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.007922] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654882, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.064785] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654881, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.104489] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.104489] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.104489] env[62974]: DEBUG nova.network.neutron [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.140935] env[62974]: DEBUG oslo_concurrency.lockutils [req-3210436e-acd1-4bd6-b117-8974de735e91 req-5faa0642-aefe-47d2-90c3-91f12ce2e7fa service nova] Releasing lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.150617] env[62974]: DEBUG oslo_concurrency.lockutils [None req-75c75565-6486-4f33-aa1f-b384af119638 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "f586f9a6-1288-4aa2-9052-6e9eb74aac5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.398s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.309880] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dca919-86ff-4dff-a57d-5cfcfcb94eed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.319227] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2579d7ab-5a3a-4bea-bea1-4e9066c7928c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.352260] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09b4161-dbd4-487a-8849-4376a21aa263 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.360928] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19aafa3-5c76-44d2-a19f-4187f0679f0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.375462] env[62974]: DEBUG nova.compute.provider_tree [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.414212] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5262f5ae-6cb5-5aa2-eba5-85821d16bb5f, 'name': SearchDatastore_Task, 'duration_secs': 0.008253} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.416093] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45e7c830-e01e-4e0e-897f-62dd564385a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.421576] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1007.421576] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ebf3a8-a77e-4548-095c-0ee89d9b2098" [ 1007.421576] env[62974]: _type = "Task" [ 1007.421576] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.431749] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ebf3a8-a77e-4548-095c-0ee89d9b2098, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.488279] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.490977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.511414] env[62974]: DEBUG oslo_vmware.api [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654882, 'name': PowerOnVM_Task, 'duration_secs': 0.469069} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.511684] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.511898] env[62974]: INFO nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Took 11.11 seconds to spawn the instance on the hypervisor. [ 1007.512086] env[62974]: DEBUG nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.512863] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5885c620-2fbf-4d15-8556-835ef083a4ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.563871] env[62974]: DEBUG oslo_vmware.api [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654881, 'name': PowerOnVM_Task, 'duration_secs': 0.610537} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.565064] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.565273] env[62974]: INFO nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Took 15.48 seconds to spawn the instance on the hypervisor. 
[ 1007.565473] env[62974]: DEBUG nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.566796] env[62974]: DEBUG nova.compute.manager [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Received event network-vif-deleted-8c1e40ea-8afa-424a-9c2d-65f7e1179366 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1007.566990] env[62974]: DEBUG nova.compute.manager [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Received event network-vif-plugged-f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1007.567198] env[62974]: DEBUG oslo_concurrency.lockutils [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] Acquiring lock "6d6331f3-327a-4f11-973e-37c1a3d9701c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.567436] env[62974]: DEBUG oslo_concurrency.lockutils [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.567602] env[62974]: DEBUG oslo_concurrency.lockutils [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.567764] env[62974]: DEBUG nova.compute.manager [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] No waiting events found dispatching network-vif-plugged-f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1007.567925] env[62974]: WARNING nova.compute.manager [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Received unexpected event network-vif-plugged-f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 for instance with vm_state building and task_state spawning. 
[ 1007.568094] env[62974]: DEBUG nova.compute.manager [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Received event network-changed-f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1007.568323] env[62974]: DEBUG nova.compute.manager [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Refreshing instance network info cache due to event network-changed-f60dfb07-76b9-46c7-b661-8dd02b1e8bb7. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1007.568401] env[62974]: DEBUG oslo_concurrency.lockutils [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] Acquiring lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.569421] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2bc012-f32c-4996-bf20-9e2b179efe78 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.636485] env[62974]: DEBUG nova.network.neutron [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1007.751721] env[62974]: DEBUG nova.network.neutron [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Updating instance_info_cache with network_info: [{"id": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "address": "fa:16:3e:87:96:e8", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf60dfb07-76", "ovs_interfaceid": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.879175] env[62974]: DEBUG nova.scheduler.client.report [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.933428] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ebf3a8-a77e-4548-095c-0ee89d9b2098, 'name': SearchDatastore_Task, 'duration_secs': 0.018133} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.933836] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.934057] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 11bd6a5d-9590-4aa3-aaf3-99d2ac394553/11bd6a5d-9590-4aa3-aaf3-99d2ac394553.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1007.934351] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5ca83bf-679b-459d-b436-09f84e7252e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.942090] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1007.942090] env[62974]: value = "task-2654883" [ 1007.942090] env[62974]: _type = "Task" [ 1007.942090] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.951399] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654883, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.989271] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.030604] env[62974]: INFO nova.compute.manager [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Took 31.13 seconds to build instance. [ 1008.089934] env[62974]: INFO nova.compute.manager [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Took 35.50 seconds to build instance. [ 1008.254545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.255078] env[62974]: DEBUG nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Instance network_info: |[{"id": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "address": "fa:16:3e:87:96:e8", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf60dfb07-76", "ovs_interfaceid": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1008.255480] env[62974]: DEBUG oslo_concurrency.lockutils [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] Acquired lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.255675] env[62974]: DEBUG nova.network.neutron [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Refreshing network info cache for port f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.257735] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:87:96:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f60dfb07-76b9-46c7-b661-8dd02b1e8bb7', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1008.268658] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Creating folder: Project (21909beb1faa4a2c994925764408480f). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1008.274228] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14287236-0f50-46ea-b43d-59212cb3134e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.287498] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Created folder: Project (21909beb1faa4a2c994925764408480f) in parent group-v535199. [ 1008.287867] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Creating folder: Instances. Parent ref: group-v535468. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1008.288283] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a15b2c5-d5cd-4fd7-a34b-f31cd97df72b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.298281] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Created folder: Instances in parent group-v535468. [ 1008.298570] env[62974]: DEBUG oslo.service.loopingcall [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.298789] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1008.299039] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4dfd5c0-3a46-4eb9-b841-21f82118908c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.321820] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1008.321820] env[62974]: value = "task-2654886" [ 1008.321820] env[62974]: _type = "Task" [ 1008.321820] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.330758] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654886, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.384286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.386602] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.391s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.386847] env[62974]: DEBUG nova.objects.instance [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lazy-loading 'resources' on Instance uuid 3426d512-d54e-4852-8eca-8ba9f5fef418 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.408756] env[62974]: INFO nova.scheduler.client.report [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Deleted allocations for instance c1d0b90c-aa1c-485d-850d-a1495feac7c9 [ 1008.455129] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654883, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44081} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.459111] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 11bd6a5d-9590-4aa3-aaf3-99d2ac394553/11bd6a5d-9590-4aa3-aaf3-99d2ac394553.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1008.459352] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1008.462544] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02b1a118-a93b-4583-bfa2-f513baef8caa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.465371] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "713b503e-43b5-409c-8086-e6d36850f962" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.465620] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.473627] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1008.473627] env[62974]: value = "task-2654887" [ 1008.473627] env[62974]: _type = "Task" [ 1008.473627] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.482638] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654887, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.491462] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.493970] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.507306] env[62974]: DEBUG nova.network.neutron [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Updated VIF entry in instance network info cache for port f60dfb07-76b9-46c7-b661-8dd02b1e8bb7. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.507826] env[62974]: DEBUG nova.network.neutron [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Updating instance_info_cache with network_info: [{"id": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "address": "fa:16:3e:87:96:e8", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf60dfb07-76", "ovs_interfaceid": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.533175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-61f05df8-619c-4028-af91-3fb1ae04b32a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.637s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.591775] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7797ffcb-ad0d-4e7b-8603-39d4bc1d60c9 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.016s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.592067] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 
tempest-ServersTestMultiNic-664197129-project-member] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.098s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.592297] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.592499] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.592669] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.594887] env[62974]: INFO nova.compute.manager [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Terminating instance [ 1008.833033] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654886, 'name': CreateVM_Task, 'duration_secs': 0.38255} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.833033] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.833236] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.833276] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.833576] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.833825] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3a8a7f1-a291-4815-9e94-00908bd3c311 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.838990] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1008.838990] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b4d48f-e31a-ac9c-a0df-77fe70051588" [ 1008.838990] env[62974]: _type = "Task" [ 1008.838990] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.846608] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b4d48f-e31a-ac9c-a0df-77fe70051588, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.917211] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e42b10e1-7529-4a0a-b17f-2232a98fdf54 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "c1d0b90c-aa1c-485d-850d-a1495feac7c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.234s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.967903] env[62974]: DEBUG nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1008.985938] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165248} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.990780] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.990780] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfc7e98-cba9-4b6c-bf3f-283b55d2a707 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.000246] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.018309] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 11bd6a5d-9590-4aa3-aaf3-99d2ac394553/11bd6a5d-9590-4aa3-aaf3-99d2ac394553.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.021953] env[62974]: DEBUG oslo_concurrency.lockutils [req-1434650e-e327-4ecb-809b-752f5f4326f2 req-270ca249-3dcb-47e2-a1c8-db43ab8959f6 service nova] Releasing lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.022321] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6db6ee4-d82f-40ad-ac65-2ef3c62060a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.044027] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1009.044027] env[62974]: value = "task-2654888" [ 1009.044027] env[62974]: _type = "Task" [ 1009.044027] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.054271] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654888, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.098386] env[62974]: DEBUG nova.compute.manager [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.098609] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.099480] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d65db5-0e22-4a08-9214-cb00064d8b52 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.109099] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.109373] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f0be52b-3653-42e0-acc8-f9baae911b8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.116208] env[62974]: DEBUG oslo_vmware.api [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1009.116208] env[62974]: value = "task-2654889" [ 1009.116208] env[62974]: _type = "Task" [ 1009.116208] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.122136] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329e1d6a-83ca-4255-99a1-e84de0636327 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.128308] env[62974]: DEBUG oslo_vmware.api [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654889, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.130948] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dbaf0d-b561-4faf-9078-ca7d0c1a5764 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.164653] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac7abb5-1521-4a6f-aea0-e0b903f75b32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.172958] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcaca832-d8a8-45ea-821e-016761143b08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.187600] env[62974]: DEBUG nova.compute.provider_tree [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.349543] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b4d48f-e31a-ac9c-a0df-77fe70051588, 'name': SearchDatastore_Task, 'duration_secs': 0.009318} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.349882] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.350126] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.350361] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.350507] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.351014] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.351014] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfb27666-9475-45af-9941-e1c32e6fc382 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.360721] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.360920] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1009.361678] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53b3f60d-5db0-484b-b97b-abeeb9083017 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.367684] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1009.367684] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]520c0d3e-3988-e124-3bfe-ca0145df5d55" [ 1009.367684] env[62974]: _type = "Task" [ 1009.367684] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.376404] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520c0d3e-3988-e124-3bfe-ca0145df5d55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.490107] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.499657] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.553582] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654888, 'name': ReconfigVM_Task, 'duration_secs': 0.407319} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.554024] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 11bd6a5d-9590-4aa3-aaf3-99d2ac394553/11bd6a5d-9590-4aa3-aaf3-99d2ac394553.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1009.554687] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a292236b-06b2-4ccb-a2ca-c7607692e06c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.560528] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1009.560528] env[62974]: value = "task-2654890" [ 1009.560528] env[62974]: _type = "Task" [ 1009.560528] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.568403] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654890, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.588604] env[62974]: DEBUG nova.compute.manager [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1009.588799] env[62974]: DEBUG nova.compute.manager [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing instance network info cache due to event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1009.589015] env[62974]: DEBUG oslo_concurrency.lockutils [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] Acquiring lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.589172] env[62974]: DEBUG oslo_concurrency.lockutils [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] Acquired lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.589329] env[62974]: DEBUG nova.network.neutron [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1009.633137] env[62974]: DEBUG oslo_vmware.api [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654889, 'name': PowerOffVM_Task, 'duration_secs': 0.193587} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.633532] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.633771] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.634102] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e0eace3-5a80-487f-8238-37730c59980b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.691355] env[62974]: DEBUG nova.scheduler.client.report [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.768662] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Unregistered the VM {{(pid=62974) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.768662] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.768662] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Deleting the datastore file [datastore1] 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.770325] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2818857-dcd5-4663-a6fb-94a88f01a32d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.778029] env[62974]: DEBUG oslo_vmware.api [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1009.778029] env[62974]: value = "task-2654892" [ 1009.778029] env[62974]: _type = "Task" [ 1009.778029] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.784691] env[62974]: DEBUG oslo_vmware.api [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.878664] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]520c0d3e-3988-e124-3bfe-ca0145df5d55, 'name': SearchDatastore_Task, 'duration_secs': 0.015097} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.879584] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3729e646-2fdb-4e69-bb5e-40970aca7c8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.885238] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1009.885238] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d412b8-3585-6bfd-396e-5d2b0ec8cf21" [ 1009.885238] env[62974]: _type = "Task" [ 1009.885238] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.893607] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d412b8-3585-6bfd-396e-5d2b0ec8cf21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.994375] env[62974]: DEBUG oslo_vmware.api [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654871, 'name': ReconfigVM_Task, 'duration_secs': 5.916679} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.994691] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.994745] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Reconfigured VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1010.071584] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654890, 'name': Rename_Task, 'duration_secs': 0.137389} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.072469] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1010.072469] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bdbaac3-1536-47c7-8676-31db2d531625 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.079128] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1010.079128] env[62974]: value = "task-2654893" [ 1010.079128] env[62974]: _type = "Task" [ 1010.079128] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.092867] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654893, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.196016] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.809s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.197991] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.229s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.198184] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.198341] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1010.198890] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.288s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.200225] env[62974]: INFO nova.compute.claims [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1010.203763] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ae848a-9085-4809-9121-b6f228f9e111 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.211969] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e94ab39-2f69-431a-a0ea-0c1ea50b569a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.218096] env[62974]: INFO nova.scheduler.client.report [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Deleted allocations for instance 3426d512-d54e-4852-8eca-8ba9f5fef418 [ 1010.233664] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ec39dd-3d4b-4f48-b4e6-dccafaecaa3c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.247023] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-45ec4b8b-92ce-4e58-ae0e-de2f3718d8ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.277155] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179610MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1010.277321] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.291142] env[62974]: DEBUG oslo_vmware.api [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185806} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.291411] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.291600] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.291776] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.291990] env[62974]: INFO nova.compute.manager [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1010.292258] env[62974]: DEBUG oslo.service.loopingcall [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.292440] env[62974]: DEBUG nova.compute.manager [-] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1010.292537] env[62974]: DEBUG nova.network.neutron [-] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1010.399030] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d412b8-3585-6bfd-396e-5d2b0ec8cf21, 'name': SearchDatastore_Task, 'duration_secs': 0.00981} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.399030] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.399030] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/6d6331f3-327a-4f11-973e-37c1a3d9701c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1010.400023] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5943f8d8-9252-45af-9f47-6b7df706186f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.408682] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1010.408682] env[62974]: value = "task-2654894" [ 1010.408682] env[62974]: _type = "Task" [ 1010.408682] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.416747] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.504362] env[62974]: DEBUG nova.network.neutron [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updated VIF entry in instance network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.504812] env[62974]: DEBUG nova.network.neutron [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc699c4dc-40", "ovs_interfaceid": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.594126] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654893, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.743157] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3776a6b6-2cf4-45d7-bae7-e07e9f4eeb7f tempest-ServerRescueNegativeTestJSON-1517196064 tempest-ServerRescueNegativeTestJSON-1517196064-project-member] Lock "3426d512-d54e-4852-8eca-8ba9f5fef418" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.186s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.924358] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49457} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.925021] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/6d6331f3-327a-4f11-973e-37c1a3d9701c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.925021] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.925202] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b16e794-871c-46d5-9a0e-a5f48c36906b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.931314] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1010.931314] env[62974]: value = "task-2654895" [ 1010.931314] env[62974]: _type = "Task" [ 1010.931314] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.940045] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.945175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.945397] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.008326] env[62974]: DEBUG oslo_concurrency.lockutils [req-f25053c7-4092-4284-a133-58781c763e30 req-1d30e42f-1017-4df1-827f-4f1d58ee05b4 service nova] Releasing lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.093626] env[62974]: DEBUG oslo_vmware.api [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654893, 'name': PowerOnVM_Task, 'duration_secs': 0.616637} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.093901] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1011.094177] env[62974]: INFO nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Took 8.12 seconds to spawn the instance on the hypervisor. 
[ 1011.094301] env[62974]: DEBUG nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1011.095098] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa31343-49e1-4dcb-9e6b-441e8d0fdc46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.388611] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492c65c1-b64d-4342-8b9a-63feccd040cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.395712] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.396251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.396251] env[62974]: DEBUG nova.network.neutron [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.398039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f6f63e-755b-48d6-8d32-4d95327ba475 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.431777] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffe4e06-56e1-4775-9a73-5031a5c30943 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.442439] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066105} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.443873] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.444650] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d9a0f6-9f1b-44ae-b761-bae5db314968 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.447787] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a843efb8-5931-4094-8427-3c476ee05859 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.451910] env[62974]: DEBUG nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1011.464254] env[62974]: DEBUG nova.compute.provider_tree [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.484637] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/6d6331f3-327a-4f11-973e-37c1a3d9701c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.485351] env[62974]: DEBUG nova.scheduler.client.report [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.488959] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b51db71d-ecb8-4488-a280-3888ec94f5cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.505666] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.307s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.506172] env[62974]: DEBUG nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1011.508782] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.018s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.508960] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.510942] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.021s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.512311] env[62974]: INFO nova.compute.claims [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1011.521236] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1011.521236] env[62974]: value = "task-2654896" [ 1011.521236] env[62974]: _type = "Task" [ 1011.521236] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.531528] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654896, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.541492] env[62974]: INFO nova.scheduler.client.report [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted allocations for instance 55229db9-9442-4973-a1f2-7762227167a4 [ 1011.542810] env[62974]: DEBUG nova.network.neutron [-] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.617911] env[62974]: INFO nova.compute.manager [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Took 26.85 seconds to build instance. [ 1011.626018] env[62974]: DEBUG nova.compute.manager [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received event network-vif-deleted-8c509c6a-a36e-4112-997d-b730dd15b165 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1011.626018] env[62974]: INFO nova.compute.manager [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Neutron deleted interface 8c509c6a-a36e-4112-997d-b730dd15b165; detaching it from the instance and deleting it from the info cache [ 1011.626018] env[62974]: DEBUG nova.network.neutron [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.645864] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.646143] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "18489c02-5958-431f-aede-f554d0d785ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.646353] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.646557] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "18489c02-5958-431f-aede-f554d0d785ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.646784] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "18489c02-5958-431f-aede-f554d0d785ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.648906] env[62974]: INFO nova.compute.manager [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Terminating instance [ 1011.977419] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.016544] env[62974]: DEBUG nova.compute.utils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1012.021043] env[62974]: DEBUG nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1012.021403] env[62974]: DEBUG nova.network.neutron [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.041974] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654896, 'name': ReconfigVM_Task, 'duration_secs': 0.268118} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.042398] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/6d6331f3-327a-4f11-973e-37c1a3d9701c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.043354] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6687dd84-6b22-4290-9c66-763b3b5748bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.047966] env[62974]: INFO nova.compute.manager [-] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Took 1.76 seconds to deallocate network for instance. [ 1012.052968] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1012.052968] env[62974]: value = "task-2654897" [ 1012.052968] env[62974]: _type = "Task" [ 1012.052968] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.057264] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a71ae417-ea9b-4ec5-92e8-5c2b411fbd63 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "55229db9-9442-4973-a1f2-7762227167a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.199s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.069563] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654897, 'name': Rename_Task} progress is 10%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.111205] env[62974]: DEBUG nova.policy [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c69e5ea97264d57978ddcb94ef4bc41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43dc876c8a2346c7bca249407fb7fed8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1012.124764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-95da17d8-54f2-4af7-ae9b-6606e198d8be tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.369s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.125272] env[62974]: DEBUG oslo_concurrency.lockutils [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] Acquiring lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.155529] env[62974]: DEBUG nova.compute.manager [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1012.155822] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.157170] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c5c8de-69dd-47b6-ae01-107f46be24f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.167238] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.167524] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80880ff9-58a2-4f0a-8fae-ca7f2ab94a8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.173593] env[62974]: DEBUG oslo_vmware.api [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1012.173593] env[62974]: value = "task-2654898" [ 1012.173593] env[62974]: _type = "Task" [ 1012.173593] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.184371] env[62974]: DEBUG oslo_vmware.api [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654898, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.193642] env[62974]: INFO nova.network.neutron [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Port 8c509c6a-a36e-4112-997d-b730dd15b165 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1012.193998] env[62974]: DEBUG nova.network.neutron [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [{"id": "7a680703-498d-42ed-9269-736752f5f38e", "address": "fa:16:3e:5e:d5:aa", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a680703-49", "ovs_interfaceid": "7a680703-498d-42ed-9269-736752f5f38e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.441708] env[62974]: DEBUG nova.network.neutron [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Successfully created port: 298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.521715] env[62974]: DEBUG nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1012.561926] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.575292] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654897, 'name': Rename_Task, 'duration_secs': 0.144071} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.575558] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.576641] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bf523f9-b3f2-49ab-91e5-39754b33df64 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.585134] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1012.585134] env[62974]: value = "task-2654899" [ 1012.585134] env[62974]: _type = "Task" [ 1012.585134] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.599721] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.686498] env[62974]: DEBUG oslo_vmware.api [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654898, 'name': PowerOffVM_Task, 'duration_secs': 0.305758} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.686775] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.686941] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.690147] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-681ff81c-9a64-41ff-b1b9-6200e5ea523a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.698175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.742182] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e520955b-d1f1-4c30-b6d4-911cd4a79867 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.751362] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5a7c18-54df-471c-8e8e-c15897c7cf9e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.796325] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b7a2fa-cad8-48cc-93d6-2519c95518f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.799068] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.799269] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.799444] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleting the datastore file [datastore1] 18489c02-5958-431f-aede-f554d0d785ed {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.799711] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-2436723d-f9f2-438b-a5f1-9cee416aac9c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.813070] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125086d7-1f94-41cf-be23-e7e6b4e06c38 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.817136] env[62974]: DEBUG oslo_vmware.api [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1012.817136] env[62974]: value = "task-2654901" [ 1012.817136] env[62974]: _type = "Task" [ 1012.817136] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.831478] env[62974]: DEBUG nova.compute.provider_tree [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.833152] env[62974]: DEBUG nova.compute.manager [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Received event network-changed-6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1012.833447] env[62974]: DEBUG nova.compute.manager [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Refreshing instance network info cache due to event network-changed-6d33ceb1-e623-4a85-bed3-a9dba877fc7b. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1012.833733] env[62974]: DEBUG oslo_concurrency.lockutils [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] Acquiring lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.833826] env[62974]: DEBUG oslo_concurrency.lockutils [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] Acquired lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.836019] env[62974]: DEBUG nova.network.neutron [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Refreshing network info cache for port 6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.839931] env[62974]: DEBUG oslo_vmware.api [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654901, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.096297] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654899, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.203483] env[62974]: DEBUG oslo_concurrency.lockutils [None req-20afcf11-5f26-44a6-b057-c87b8514e9e9 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-18489c02-5958-431f-aede-f554d0d785ed-8c509c6a-a36e-4112-997d-b730dd15b165" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.559s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.328041] env[62974]: DEBUG oslo_vmware.api [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164422} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.328041] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.328041] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.328293] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.328325] env[62974]: INFO nova.compute.manager [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1013.328566] env[62974]: DEBUG oslo.service.loopingcall [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1013.328799] env[62974]: DEBUG nova.compute.manager [-] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1013.328867] env[62974]: DEBUG nova.network.neutron [-] [instance: 18489c02-5958-431f-aede-f554d0d785ed] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.341023] env[62974]: DEBUG nova.scheduler.client.report [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.540922] env[62974]: DEBUG nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1013.562468] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1013.562707] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1013.562866] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1013.563109] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 
tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1013.563276] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1013.563427] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1013.563634] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1013.563794] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1013.563963] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1013.564131] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1013.564307] env[62974]: DEBUG nova.virt.hardware [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1013.565179] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce3ee49-c2a1-4981-9a61-5037ee52b144 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.574146] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd12b00-7708-45eb-a833-60241a4918bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.602588] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654899, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.737488] env[62974]: DEBUG nova.network.neutron [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Updated VIF entry in instance network info cache for port 6d33ceb1-e623-4a85-bed3-a9dba877fc7b. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.737961] env[62974]: DEBUG nova.network.neutron [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Updating instance_info_cache with network_info: [{"id": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "address": "fa:16:3e:a6:a5:cd", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d33ceb1-e6", "ovs_interfaceid": "6d33ceb1-e623-4a85-bed3-a9dba877fc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.848427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.848944] env[62974]: DEBUG nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1013.852166] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.575s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.002753] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.003064] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.061521] env[62974]: DEBUG nova.compute.manager [req-9e7a5ad6-c26f-4355-86f8-40688f8c16cd req-d5543d1e-a855-43cf-95f1-2995e16af2a5 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Received event network-vif-deleted-7a680703-498d-42ed-9269-736752f5f38e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1014.061792] env[62974]: INFO nova.compute.manager [req-9e7a5ad6-c26f-4355-86f8-40688f8c16cd req-d5543d1e-a855-43cf-95f1-2995e16af2a5 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Neutron deleted interface 7a680703-498d-42ed-9269-736752f5f38e; detaching it from the instance and deleting it from the info cache [ 1014.062144] env[62974]: DEBUG nova.network.neutron [req-9e7a5ad6-c26f-4355-86f8-40688f8c16cd req-d5543d1e-a855-43cf-95f1-2995e16af2a5 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.098971] env[62974]: DEBUG oslo_vmware.api [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654899, 'name': PowerOnVM_Task, 'duration_secs': 1.032876} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.099352] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.099570] env[62974]: INFO nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Took 8.21 seconds to spawn the instance on the hypervisor. 
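The DeleteDatastoreFile_Task and PowerOnVM_Task records above, together with their "_poll_task ... progress is N%" and "completed successfully" follow-ups, come from oslo.vmware's task-polling loop. Below is a minimal sketch of the calling pattern that produces such records, assuming typical oslo.vmware usage; the host, credentials, retry/poll values, and the `power_on_vm` helper name are illustrative placeholders, not taken from this log.

```python
# Sketch of the oslo.vmware "invoke a *_Task method, then poll it" pattern.
# Connection parameters and the helper name are assumptions for illustration.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util


def power_on_vm(host, user, password, vm_moref_value):
    # api_retry_count / task_poll_interval mirror the knobs such sessions
    # expose; the concrete values here are arbitrary examples.
    session = vmware_api.VMwareAPISession(
        host, user, password,
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for the VM from its moref value.
    vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')

    # The SOAP call returns a Task moref immediately ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... and wait_for_task polls it (the "progress is N%" lines in the log),
    # raising if the task ends in an error state.
    return session.wait_for_task(task)
```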
[ 1014.099762] env[62974]: DEBUG nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.100538] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4307cac-d226-412e-975a-2d4a3d7ca47e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.188297] env[62974]: DEBUG nova.network.neutron [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Successfully updated port: 298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.240916] env[62974]: DEBUG oslo_concurrency.lockutils [req-83934ad2-1c2b-44ef-b8b2-a9c46519af0c req-75984a73-a119-4ac6-a55d-f58a738dddc5 service nova] Releasing lock "refresh_cache-11bd6a5d-9590-4aa3-aaf3-99d2ac394553" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.358957] env[62974]: DEBUG nova.compute.utils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1014.366630] env[62974]: DEBUG nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1014.366630] env[62974]: DEBUG nova.network.neutron [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1014.405115] env[62974]: DEBUG nova.policy [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a86bbc98ec50467792b3c6a6cedc790b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14dd4a9a77ad40458d40bb82ac4b90a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1014.506134] env[62974]: DEBUG nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1014.540756] env[62974]: DEBUG nova.network.neutron [-] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.566594] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef458d9a-5af4-4914-92a5-2bd3ba95ab72 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.576290] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda9eb69-5cc0-4b35-bb0a-06866cf3a62b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.607080] env[62974]: DEBUG nova.compute.manager [req-9e7a5ad6-c26f-4355-86f8-40688f8c16cd req-d5543d1e-a855-43cf-95f1-2995e16af2a5 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Detach interface failed, port_id=7a680703-498d-42ed-9269-736752f5f38e, reason: Instance 18489c02-5958-431f-aede-f554d0d785ed could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1014.618879] env[62974]: INFO nova.compute.manager [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Took 28.86 seconds to build instance. [ 1014.691194] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.691361] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.691576] env[62974]: DEBUG nova.network.neutron [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1014.867063] env[62974]: DEBUG nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1014.887084] env[62974]: DEBUG nova.network.neutron [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Successfully created port: 42aa0aae-99ad-43cd-96cc-af93f45297cf {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cf73422d-7f4b-4bae-9d69-de74d7211243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 514e0f15-f27d-4fab-9107-b92884075420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 18489c02-5958-431f-aede-f554d0d785ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 7163e48f-8344-4837-bbfd-cbb5741eee5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance dca952df-dac9-4502-948b-24ac6fb939f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c90c9a6d-661f-4574-8a0d-7d8cacf8618d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 11bd6a5d-9590-4aa3-aaf3-99d2ac394553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 6d6331f3-327a-4f11-973e-37c1a3d9701c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 4de11643-da0a-453f-b03e-ca19819f4f06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.901198] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 713b503e-43b5-409c-8086-e6d36850f962 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1015.043911] env[62974]: INFO nova.compute.manager [-] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Took 1.71 seconds to deallocate network for instance. [ 1015.049755] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.121022] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2a3f493f-69bd-4c3e-85f3-ef9a366c1939 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.372s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.222038] env[62974]: DEBUG nova.network.neutron [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.346717] env[62974]: DEBUG nova.network.neutron [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updating instance_info_cache with network_info: [{"id": "298eccf6-2f42-4f6e-99da-2695849a3163", "address": "fa:16:3e:8a:1a:18", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap298eccf6-2f", "ovs_interfaceid": "298eccf6-2f42-4f6e-99da-2695849a3163", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.403643] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a44cca2f-9286-490a-9013-1fea30984fa5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1015.539706] env[62974]: INFO nova.compute.manager [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Rescuing [ 1015.540058] env[62974]: DEBUG oslo_concurrency.lockutils [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.540228] env[62974]: DEBUG oslo_concurrency.lockutils [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.540399] env[62974]: DEBUG nova.network.neutron [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.553625] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.850107] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.850448] env[62974]: DEBUG nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Instance network_info: |[{"id": "298eccf6-2f42-4f6e-99da-2695849a3163", "address": "fa:16:3e:8a:1a:18", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap298eccf6-2f", "ovs_interfaceid": 
"298eccf6-2f42-4f6e-99da-2695849a3163", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1015.850913] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:1a:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '298eccf6-2f42-4f6e-99da-2695849a3163', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1015.858477] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Creating folder: Project (43dc876c8a2346c7bca249407fb7fed8). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1015.858750] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab1c06c9-da30-4a23-a14d-8854a1c95f34 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.869712] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Created folder: Project (43dc876c8a2346c7bca249407fb7fed8) in parent group-v535199. [ 1015.869895] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Creating folder: Instances. Parent ref: group-v535472. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1015.870125] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5affa43d-3e4b-4771-b277-cf85337d4086 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.878858] env[62974]: DEBUG nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1015.882213] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Created folder: Instances in parent group-v535472. [ 1015.882430] env[62974]: DEBUG oslo.service.loopingcall [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1015.882610] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1015.883039] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6434e5a1-9a32-4cd8-abd4-d97ebb30c5c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.902333] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1015.902333] env[62974]: value = "task-2654905" [ 1015.902333] env[62974]: _type = "Task" [ 1015.902333] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.905993] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 220295bf-b021-4800-bc7e-a3dd311c747a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1015.906234] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1015.906378] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1015.910191] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1015.910411] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1015.910567] env[62974]: DEBUG nova.virt.hardware [None 
req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1015.910745] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1015.910888] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1015.911042] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1015.911246] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1015.911402] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1015.911562] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1015.911720] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1015.911907] env[62974]: DEBUG nova.virt.hardware [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1015.913275] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7376056-dad8-4304-a609-3a660dc38ed8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.918579] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654905, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.924150] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c96dc24-76af-4c49-a777-0635ac6709d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.091833] env[62974]: DEBUG nova.compute.manager [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Received event network-vif-plugged-298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1016.092062] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.092270] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] Lock "4de11643-da0a-453f-b03e-ca19819f4f06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.092434] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] Lock "4de11643-da0a-453f-b03e-ca19819f4f06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.092623] env[62974]: DEBUG nova.compute.manager [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] No waiting events found dispatching network-vif-plugged-298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1016.094414] env[62974]: WARNING nova.compute.manager [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Received unexpected event network-vif-plugged-298eccf6-2f42-4f6e-99da-2695849a3163 for instance with vm_state building and task_state spawning. [ 1016.094414] env[62974]: DEBUG nova.compute.manager [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Received event network-changed-298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1016.094414] env[62974]: DEBUG nova.compute.manager [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Refreshing instance network info cache due to event network-changed-298eccf6-2f42-4f6e-99da-2695849a3163. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1016.094414] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] Acquiring lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.094414] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] Acquired lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.094414] env[62974]: DEBUG nova.network.neutron [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Refreshing network info cache for port 298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.119116] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364f47c9-27cc-477c-8b25-571e9413a624 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.128102] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf2b985-4f9e-4857-8ead-b0fa7079a6fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.164079] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e789bde-aae7-4ac2-a8c2-4524e4d6eea8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.175455] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93928d10-0261-4f6a-a242-19b29a2396ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.192347] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.375147] env[62974]: DEBUG nova.network.neutron [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Updating instance_info_cache with network_info: [{"id": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "address": "fa:16:3e:87:96:e8", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf60dfb07-76", "ovs_interfaceid": "f60dfb07-76b9-46c7-b661-8dd02b1e8bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.414010] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654905, 'name': CreateVM_Task, 'duration_secs': 0.333822} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.414224] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1016.414971] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.415167] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.415684] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1016.415803] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d764b64-7845-4886-87c4-371047307251 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.420760] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1016.420760] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f6c68-38f3-8766-483a-d0c6f1c084c6" [ 1016.420760] env[62974]: _type = "Task" [ 1016.420760] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.428694] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f6c68-38f3-8766-483a-d0c6f1c084c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.524398] env[62974]: DEBUG nova.network.neutron [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Successfully updated port: 42aa0aae-99ad-43cd-96cc-af93f45297cf {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1016.700577] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.835510] env[62974]: DEBUG nova.network.neutron [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updated VIF entry in instance network info cache for port 298eccf6-2f42-4f6e-99da-2695849a3163. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1016.836067] env[62974]: DEBUG nova.network.neutron [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updating instance_info_cache with network_info: [{"id": "298eccf6-2f42-4f6e-99da-2695849a3163", "address": "fa:16:3e:8a:1a:18", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap298eccf6-2f", "ovs_interfaceid": "298eccf6-2f42-4f6e-99da-2695849a3163", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.878666] env[62974]: DEBUG oslo_concurrency.lockutils [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "refresh_cache-6d6331f3-327a-4f11-973e-37c1a3d9701c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.931020] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef 
tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523f6c68-38f3-8766-483a-d0c6f1c084c6, 'name': SearchDatastore_Task, 'duration_secs': 0.014355} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.931338] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.931571] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1016.931809] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.931952] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.932151] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1016.932408] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3789afc1-e1d8-4700-bcc2-7711c396fec8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.941891] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1016.942101] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1016.942819] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d423dac-adc8-468f-be10-e723bad83f32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.948772] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1016.948772] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52944d17-f131-ff95-b209-1ed35e0e514a" [ 1016.948772] env[62974]: _type = "Task" [ 1016.948772] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.959226] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52944d17-f131-ff95-b209-1ed35e0e514a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.025829] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.025932] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.026134] env[62974]: DEBUG nova.network.neutron [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.206805] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1017.207113] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.355s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.207219] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.230s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1017.208821] env[62974]: INFO nova.compute.claims [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.340716] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fe2adc0-c85b-41dc-a260-72257f1c8fb8 req-1bb53b27-830d-41b5-b46c-95a06cae88bf service nova] Releasing lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.459444] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52944d17-f131-ff95-b209-1ed35e0e514a, 'name': SearchDatastore_Task, 'duration_secs': 0.015897} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.460253] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd0eb1d2-d779-4584-94f9-b04d72614c36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.465155] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1017.465155] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5268fa4b-8237-89ef-4028-4a52ee52277a" [ 1017.465155] env[62974]: _type = "Task" [ 1017.465155] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.472311] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5268fa4b-8237-89ef-4028-4a52ee52277a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.556442] env[62974]: DEBUG nova.network.neutron [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1017.676448] env[62974]: DEBUG nova.network.neutron [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance_info_cache with network_info: [{"id": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "address": "fa:16:3e:5c:65:4f", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42aa0aae-99", "ovs_interfaceid": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.978744] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5268fa4b-8237-89ef-4028-4a52ee52277a, 'name': SearchDatastore_Task, 'duration_secs': 0.057801} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.979074] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.979356] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 4de11643-da0a-453f-b03e-ca19819f4f06/4de11643-da0a-453f-b03e-ca19819f4f06.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1017.979676] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a037caa-83c5-473e-8117-fa91d0199208 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.987788] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1017.987788] env[62974]: value = "task-2654907" [ 1017.987788] env[62974]: _type = "Task" [ 1017.987788] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.996316] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654907, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.124546] env[62974]: DEBUG nova.compute.manager [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Received event network-vif-plugged-42aa0aae-99ad-43cd-96cc-af93f45297cf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1018.124811] env[62974]: DEBUG oslo_concurrency.lockutils [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] Acquiring lock "713b503e-43b5-409c-8086-e6d36850f962-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.125041] env[62974]: DEBUG oslo_concurrency.lockutils [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] Lock "713b503e-43b5-409c-8086-e6d36850f962-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.125260] env[62974]: DEBUG oslo_concurrency.lockutils [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] Lock "713b503e-43b5-409c-8086-e6d36850f962-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.125444] env[62974]: DEBUG nova.compute.manager [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] No waiting events found dispatching network-vif-plugged-42aa0aae-99ad-43cd-96cc-af93f45297cf {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1018.125731] env[62974]: WARNING nova.compute.manager [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Received unexpected event network-vif-plugged-42aa0aae-99ad-43cd-96cc-af93f45297cf for instance with vm_state building and task_state spawning. [ 1018.125928] env[62974]: DEBUG nova.compute.manager [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Received event network-changed-42aa0aae-99ad-43cd-96cc-af93f45297cf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1018.126105] env[62974]: DEBUG nova.compute.manager [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Refreshing instance network info cache due to event network-changed-42aa0aae-99ad-43cd-96cc-af93f45297cf. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1018.126279] env[62974]: DEBUG oslo_concurrency.lockutils [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] Acquiring lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.179059] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.179221] env[62974]: DEBUG nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Instance network_info: |[{"id": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "address": "fa:16:3e:5c:65:4f", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42aa0aae-99", "ovs_interfaceid": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1018.179593] env[62974]: DEBUG oslo_concurrency.lockutils [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] Acquired lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.179790] env[62974]: DEBUG nova.network.neutron [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Refreshing network info cache for port 42aa0aae-99ad-43cd-96cc-af93f45297cf {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.181052] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:65:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'42aa0aae-99ad-43cd-96cc-af93f45297cf', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.190083] env[62974]: DEBUG oslo.service.loopingcall [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.193041] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.193745] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-033a70cf-bcef-40e4-aa27-1973391addd6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.214281] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.214281] env[62974]: value = "task-2654908" [ 1018.214281] env[62974]: _type = "Task" [ 1018.214281] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.226614] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654908, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.401741] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87508551-f99f-45f0-b1f4-27513cc96e1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.409634] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0693e022-6522-40ce-92a6-7944e2fac04a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.413494] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.413741] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd2440bf-f037-4823-a8e5-c1700614d6d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.444488] env[62974]: DEBUG nova.network.neutron [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updated VIF entry in instance network info cache for port 42aa0aae-99ad-43cd-96cc-af93f45297cf. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1018.444858] env[62974]: DEBUG nova.network.neutron [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance_info_cache with network_info: [{"id": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "address": "fa:16:3e:5c:65:4f", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42aa0aae-99", "ovs_interfaceid": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.447362] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60efc931-44aa-4262-972c-8a4c11766d73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.450700] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1018.450700] env[62974]: value = "task-2654909" [ 1018.450700] env[62974]: _type = "Task" [ 1018.450700] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.462388] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc25c68b-9deb-4493-a646-060705d390eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.475166] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654909, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.489406] env[62974]: DEBUG nova.compute.provider_tree [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.499633] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654907, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.724209] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654908, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.948098] env[62974]: DEBUG oslo_concurrency.lockutils [req-dd10ffe0-96d4-40e0-a5c2-9c9f1195b7de req-1653ee9d-975c-498a-8954-ffe6839859eb service nova] Releasing lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.960567] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654909, 'name': PowerOffVM_Task, 'duration_secs': 0.382935} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.960861] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.961710] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddd01f0-16b6-4a44-9c46-72e58c5eb6cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.979483] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7160e146-5d7e-4517-b6ad-614748eef172 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.997223] env[62974]: DEBUG nova.scheduler.client.report [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.000257] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef 
tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654907, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.733731} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.000814] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 4de11643-da0a-453f-b03e-ca19819f4f06/4de11643-da0a-453f-b03e-ca19819f4f06.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1019.000964] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.004711] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-215b3590-0806-4ff2-83dd-0e918f72bbf1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.008195] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1019.008195] env[62974]: value = "task-2654910" [ 1019.008195] env[62974]: _type = "Task" [ 1019.008195] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.013199] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.013997] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-744fe349-b703-4ab3-a32b-cab6eb118969 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.025930] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654910, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.025930] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1019.025930] env[62974]: value = "task-2654911" [ 1019.025930] env[62974]: _type = "Task" [ 1019.025930] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.033490] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1019.033490] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1019.033490] env[62974]: DEBUG oslo_concurrency.lockutils [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.033714] env[62974]: DEBUG oslo_concurrency.lockutils [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.033714] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.034478] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19779977-7efe-4433-a56b-2f7913adb9fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.042359] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.042584] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.043397] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2516201b-010e-4bac-be94-17ff0890a07d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.049165] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1019.049165] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52559030-3bff-90ed-9c22-5af4d02b18f8" [ 1019.049165] env[62974]: _type = "Task" [ 1019.049165] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.057447] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52559030-3bff-90ed-9c22-5af4d02b18f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.225059] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654908, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.502098] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.502651] env[62974]: DEBUG nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1019.505343] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.944s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.505538] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.507522] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.459s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.508981] env[62974]: INFO nova.compute.claims [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.520756] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654910, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064605} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.521044] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.521843] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2ee8ab-ac9e-4725-b718-d57efac802b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.545941] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 4de11643-da0a-453f-b03e-ca19819f4f06/4de11643-da0a-453f-b03e-ca19819f4f06.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.547696] env[62974]: INFO nova.scheduler.client.report [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Deleted allocations for instance 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f [ 1019.548643] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9a08cc3-baa6-41d5-8539-7974cf6337f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.576358] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52559030-3bff-90ed-9c22-5af4d02b18f8, 'name': SearchDatastore_Task, 'duration_secs': 0.008187} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.578214] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1019.578214] env[62974]: value = "task-2654913" [ 1019.578214] env[62974]: _type = "Task" [ 1019.578214] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.578469] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ba5734e-560b-4367-85af-9c5732dc94da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.588254] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1019.588254] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224b85a-274f-069b-78a5-287f2b69555a" [ 1019.588254] env[62974]: _type = "Task" [ 1019.588254] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.591553] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654913, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.600085] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224b85a-274f-069b-78a5-287f2b69555a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.730034] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654908, 'name': CreateVM_Task, 'duration_secs': 1.417487} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.730034] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.730723] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.730916] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.731271] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1019.731531] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e0f6d39-9704-4dab-8f38-90d32bd96232 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.736474] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1019.736474] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b0ac68-bbda-5893-f99b-ac3d7ce57a88" [ 1019.736474] env[62974]: _type = "Task" [ 1019.736474] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.744088] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b0ac68-bbda-5893-f99b-ac3d7ce57a88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.013986] env[62974]: DEBUG nova.compute.utils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1020.018646] env[62974]: DEBUG nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1020.018952] env[62974]: DEBUG nova.network.neutron [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1020.063215] env[62974]: DEBUG nova.policy [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a006166df7ec442d834a6b3094875125', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4e071d2e6ef4b928dd40ea5b8f81fe6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1020.069519] env[62974]: DEBUG oslo_concurrency.lockutils [None req-163320ae-bbcc-4607-9805-4aef0c3a2d98 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.477s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.093181] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654913, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.101206] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224b85a-274f-069b-78a5-287f2b69555a, 'name': SearchDatastore_Task, 'duration_secs': 0.010915} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.101485] env[62974]: DEBUG oslo_concurrency.lockutils [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.101721] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. {{(pid=62974) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1020.101972] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-307a7349-3e79-4d39-855a-d0e6736c0160 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.108781] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1020.108781] env[62974]: value = "task-2654914" [ 1020.108781] env[62974]: _type = "Task" [ 1020.108781] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.116529] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654914, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.249032] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b0ac68-bbda-5893-f99b-ac3d7ce57a88, 'name': SearchDatastore_Task, 'duration_secs': 0.010359} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.250408] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.250408] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.250408] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.250623] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.250681] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.250985] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d47e984a-ce56-4b1f-9041-5d9eda82d520 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.267715] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.267892] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1020.268711] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3d9accb-70a1-4726-bb09-a0a106008cfc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.275048] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1020.275048] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe59e6-8cb7-c502-b963-21e39c107bdd" [ 1020.275048] env[62974]: _type = "Task" [ 1020.275048] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.283879] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe59e6-8cb7-c502-b963-21e39c107bdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.365753] env[62974]: DEBUG nova.network.neutron [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Successfully created port: 3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1020.526064] env[62974]: DEBUG nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1020.596200] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654913, 'name': ReconfigVM_Task, 'duration_secs': 0.752272} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.596305] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 4de11643-da0a-453f-b03e-ca19819f4f06/4de11643-da0a-453f-b03e-ca19819f4f06.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.596966] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ae7816d-2af9-409b-bf5a-4bd2f4e579ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.605019] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1020.605019] env[62974]: value = "task-2654915" [ 1020.605019] env[62974]: _type = "Task" [ 1020.605019] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.619514] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654915, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.624613] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654914, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504438} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.624871] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. 
[ 1020.625667] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d9581f-5c31-41fa-9ca7-45b2e49f2fdd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.652664] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.655174] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-509b8ac0-f7a6-42cb-8eba-a551c622dc04 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.673041] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1020.673041] env[62974]: value = "task-2654916" [ 1020.673041] env[62974]: _type = "Task" [ 1020.673041] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.682763] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654916, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.768753] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838f42eb-0e85-43a8-9b23-41d665f5eec1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.780245] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23af40a-ac7a-4126-b17d-bf247431c490 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.789653] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fe59e6-8cb7-c502-b963-21e39c107bdd, 'name': SearchDatastore_Task, 'duration_secs': 0.066999} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.815989] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8e67e85-4fc9-4591-8407-a0bc3c66533b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.818738] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fccd84d-17c5-405d-abed-60145cae75c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.824734] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1020.824734] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4f7cd-fd56-d0d2-d997-791a4ad9a086" [ 1020.824734] env[62974]: _type = "Task" [ 1020.824734] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.829912] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fae0165-4564-4214-9f3e-7a7247a2c88e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.838247] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4f7cd-fd56-d0d2-d997-791a4ad9a086, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.847041] env[62974]: DEBUG nova.compute.provider_tree [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.114489] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654915, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.186328] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654916, 'name': ReconfigVM_Task, 'duration_secs': 0.306194} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.186328] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.186328] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112f76c9-0dc7-4e1c-9f46-d7240a407b31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.210559] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7807176-e61d-4c5c-8052-2b39e5964469 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.225900] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1021.225900] env[62974]: value = "task-2654918" [ 1021.225900] env[62974]: _type = "Task" [ 1021.225900] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.235282] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654918, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.337558] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4f7cd-fd56-d0d2-d997-791a4ad9a086, 'name': SearchDatastore_Task, 'duration_secs': 0.014448} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.337999] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.339337] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 713b503e-43b5-409c-8086-e6d36850f962/713b503e-43b5-409c-8086-e6d36850f962.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1021.339337] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffb266c0-cca6-4f28-a489-b00cb286fc02 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.345220] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1021.345220] env[62974]: value = "task-2654919" [ 1021.345220] env[62974]: _type = "Task" [ 1021.345220] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.354986] env[62974]: DEBUG nova.scheduler.client.report [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.360396] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654919, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.537169] env[62974]: DEBUG nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1021.574022] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.575043] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.577132] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.577132] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.577132] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.577132] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.577132] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.577132] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.577693] env[62974]: DEBUG nova.virt.hardware [None 
req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.578431] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.578892] env[62974]: DEBUG nova.virt.hardware [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.580507] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c2cc0e-5ea6-4ab4-a50e-2de340346332 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.598086] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c8324e-a8b8-4e94-9817-2b4f25a0227c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.635780] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654915, 'name': Rename_Task, 'duration_secs': 0.804541} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.636579] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.637195] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b08d574-2c3e-40b6-9fe3-08c3cca4a19b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.647200] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1021.647200] env[62974]: value = "task-2654920" [ 1021.647200] env[62974]: _type = "Task" [ 1021.647200] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.660888] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654920, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.737840] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654918, 'name': ReconfigVM_Task, 'duration_secs': 0.178171} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.737840] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.737840] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74c8bc6c-d62e-4270-b0e8-749e80ea9c4a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.744362] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1021.744362] env[62974]: value = "task-2654921" [ 1021.744362] env[62974]: _type = "Task" [ 1021.744362] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.758254] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654921, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.854998] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654919, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.862949] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.863565] env[62974]: DEBUG nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1021.866378] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.313s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.866596] env[62974]: DEBUG nova.objects.instance [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'resources' on Instance uuid 18489c02-5958-431f-aede-f554d0d785ed {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.898919] env[62974]: DEBUG nova.network.neutron [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Successfully updated port: 3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.926382] env[62974]: DEBUG nova.compute.manager [req-91f4c01f-fdbe-40bc-87c8-e17fe8ca2682 req-24d66049-5707-483a-aeda-fc58563a51fd service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Received event network-vif-plugged-3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1021.926382] env[62974]: DEBUG oslo_concurrency.lockutils [req-91f4c01f-fdbe-40bc-87c8-e17fe8ca2682 req-24d66049-5707-483a-aeda-fc58563a51fd service nova] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.926382] env[62974]: DEBUG oslo_concurrency.lockutils [req-91f4c01f-fdbe-40bc-87c8-e17fe8ca2682 req-24d66049-5707-483a-aeda-fc58563a51fd service nova] Lock "a44cca2f-9286-490a-9013-1fea30984fa5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.926382] env[62974]: DEBUG oslo_concurrency.lockutils [req-91f4c01f-fdbe-40bc-87c8-e17fe8ca2682 req-24d66049-5707-483a-aeda-fc58563a51fd service nova] Lock "a44cca2f-9286-490a-9013-1fea30984fa5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.926382] env[62974]: DEBUG nova.compute.manager [req-91f4c01f-fdbe-40bc-87c8-e17fe8ca2682 req-24d66049-5707-483a-aeda-fc58563a51fd service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] No waiting events found dispatching network-vif-plugged-3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1021.927143] env[62974]: WARNING nova.compute.manager [req-91f4c01f-fdbe-40bc-87c8-e17fe8ca2682 req-24d66049-5707-483a-aeda-fc58563a51fd service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Received unexpected event network-vif-plugged-3ee36563-83e1-498a-a5a3-81a8ff2ee417 for instance with vm_state building and task_state spawning. 
[ 1022.157809] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654920, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.257507] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654921, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.357506] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654919, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533339} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.357980] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 713b503e-43b5-409c-8086-e6d36850f962/713b503e-43b5-409c-8086-e6d36850f962.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.358477] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.361191] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ea9877f-c978-4a84-8b93-906b320cbd5a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.367386] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1022.367386] env[62974]: value = "task-2654922" [ 1022.367386] env[62974]: _type = "Task" [ 1022.367386] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.368906] env[62974]: DEBUG nova.compute.utils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1022.374206] env[62974]: DEBUG nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1022.374206] env[62974]: DEBUG nova.network.neutron [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1022.387996] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654922, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.400287] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.400427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.400584] env[62974]: DEBUG nova.network.neutron [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.425757] env[62974]: DEBUG nova.policy [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7337dc651b624b41a4dae92e0603c534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ecf0c1b56e34a6cbc2d073089e37efc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1022.587194] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5c5315-d4b4-4f1d-89f9-798bce49e2e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.596855] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8ffaf5-66a2-4ff5-bac2-9834074e38f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.629617] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7acf90c-afed-4372-ae83-ac860fc93dbf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.638475] env[62974]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fede5be5-ba4d-4ead-86ad-7db6f8a7676b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.653799] env[62974]: DEBUG nova.compute.provider_tree [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.663012] env[62974]: DEBUG oslo_vmware.api [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2654920, 'name': PowerOnVM_Task, 'duration_secs': 0.851423} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.663900] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.664089] env[62974]: INFO nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Took 9.12 seconds to spawn the instance on the hypervisor. [ 1022.664307] env[62974]: DEBUG nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.665063] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ba9f1a-e0cf-4b77-a457-723c5aa3029c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.722879] env[62974]: DEBUG nova.network.neutron [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Successfully created port: daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.758829] env[62974]: DEBUG oslo_vmware.api [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654921, 'name': PowerOnVM_Task, 'duration_secs': 0.669435} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.759263] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.764044] env[62974]: DEBUG nova.compute.manager [None req-154cd4f4-b92e-485d-9db8-7d1e1ab6aab8 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.764044] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53e2ec2-6d46-4421-b879-d09907046ea2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.789728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.789988] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.875056] env[62974]: DEBUG nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1022.883255] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654922, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.320599} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.883645] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1022.884295] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fc4b21-dca1-4657-be20-92ebaf9917ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.906124] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 713b503e-43b5-409c-8086-e6d36850f962/713b503e-43b5-409c-8086-e6d36850f962.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.908235] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fdc469e-4bfb-45d1-a342-b11509b59df8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.930167] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1022.930167] env[62974]: value = "task-2654923" [ 1022.930167] env[62974]: _type = "Task" [ 1022.930167] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.940039] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654923, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.967344] env[62974]: DEBUG nova.network.neutron [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1023.133320] env[62974]: DEBUG nova.network.neutron [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updating instance_info_cache with network_info: [{"id": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "address": "fa:16:3e:b0:69:3b", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee36563-83", "ovs_interfaceid": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.159428] env[62974]: DEBUG nova.scheduler.client.report [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.183357] env[62974]: INFO nova.compute.manager [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Took 19.32 seconds to build instance. [ 1023.293883] env[62974]: DEBUG nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1023.441158] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654923, 'name': ReconfigVM_Task, 'duration_secs': 0.311315} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.441561] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 713b503e-43b5-409c-8086-e6d36850f962/713b503e-43b5-409c-8086-e6d36850f962.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.442172] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8f4180c-1e6f-4bde-ae5c-e2d905e9bf4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.450143] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1023.450143] env[62974]: value = "task-2654925" [ 1023.450143] env[62974]: _type = "Task" [ 1023.450143] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.459541] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654925, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.635840] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.636036] env[62974]: DEBUG nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Instance network_info: |[{"id": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "address": "fa:16:3e:b0:69:3b", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee36563-83", "ovs_interfaceid": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1023.636580] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:69:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ee36563-83e1-498a-a5a3-81a8ff2ee417', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.647487] env[62974]: DEBUG oslo.service.loopingcall [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.647881] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.648008] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8ae23b2-1606-4152-8d3a-6ce5dbb1c0f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.667703] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.801s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.672030] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.672030] env[62974]: value = "task-2654926" [ 1023.672030] env[62974]: _type = "Task" [ 1023.672030] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.680982] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654926, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.684830] env[62974]: DEBUG oslo_concurrency.lockutils [None req-77dfdccc-e468-41d2-a8da-ca3a3e6354ef tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.297s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.688674] env[62974]: INFO nova.scheduler.client.report [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted allocations for instance 18489c02-5958-431f-aede-f554d0d785ed [ 1023.814738] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.815167] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.816760] env[62974]: INFO nova.compute.claims [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1023.885628] env[62974]: DEBUG nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1023.911526] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1023.911768] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.911934] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1023.912109] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.912247] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1023.912385] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1023.912576] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1023.912749] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1023.912896] env[62974]: DEBUG 
nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1023.913089] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1023.913248] env[62974]: DEBUG nova.virt.hardware [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1023.914831] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e40977-0f08-4433-bc96-146c06dbd14f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.923292] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140e252c-f203-4f1f-b663-b60c12d7e276 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.959498] env[62974]: DEBUG nova.compute.manager [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Received event network-changed-3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1023.959766] env[62974]: DEBUG nova.compute.manager [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Refreshing instance network info cache due to event network-changed-3ee36563-83e1-498a-a5a3-81a8ff2ee417. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1023.959989] env[62974]: DEBUG oslo_concurrency.lockutils [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] Acquiring lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.960163] env[62974]: DEBUG oslo_concurrency.lockutils [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] Acquired lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.960305] env[62974]: DEBUG nova.network.neutron [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Refreshing network info cache for port 3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.967067] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654925, 'name': Rename_Task, 'duration_secs': 0.344192} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.967335] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.968107] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b627c15c-a4a9-4115-98b5-6ecc6365a06d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.978200] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1023.978200] env[62974]: value = "task-2654927" [ 1023.978200] env[62974]: _type = "Task" [ 1023.978200] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.989552] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654927, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.030091] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "1aafddba-5da3-4c46-a537-3c178a1fec88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.030357] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.182940] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654926, 'name': CreateVM_Task, 'duration_secs': 0.408213} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.183138] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.183822] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.183984] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.184336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1024.184586] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e07d3dcb-5759-4abf-b700-982519ac028b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.189453] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1024.189453] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528a8c58-38e8-1fdc-6950-ad2f469367c4" [ 1024.189453] env[62974]: _type = "Task" [ 1024.189453] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.199536] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7a501847-81b1-4dcd-a2c1-f0782b166d21 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "18489c02-5958-431f-aede-f554d0d785ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.553s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.200972] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528a8c58-38e8-1fdc-6950-ad2f469367c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.201216] env[62974]: DEBUG oslo_concurrency.lockutils [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] Acquired lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.202133] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9623ab8-d8c7-464f-9e36-871e30711c77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.210173] env[62974]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1024.210353] env[62974]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62974) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1024.210727] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-238ded1c-2327-4fc0-a724-1196a82494c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.221272] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5730fcf7-6a01-474f-8478-225f72ca8de3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.259719] env[62974]: ERROR root [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-535449' has already been deleted or 
has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-535449' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-535449' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-535449'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-535449' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-535449' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-535449'}\n"]: nova.exception.InstanceNotFound: Instance 18489c02-5958-431f-aede-f554d0d785ed could not be found. [ 1024.260032] env[62974]: DEBUG oslo_concurrency.lockutils [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] Releasing lock "18489c02-5958-431f-aede-f554d0d785ed" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.260265] env[62974]: DEBUG nova.compute.manager [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Detach interface failed, port_id=8c509c6a-a36e-4112-997d-b730dd15b165, reason: Instance 18489c02-5958-431f-aede-f554d0d785ed could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1024.260395] env[62974]: DEBUG nova.compute.manager [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-vif-deleted-40646b9d-b80e-40c3-9130-dcb5916cb108 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1024.260576] env[62974]: DEBUG nova.compute.manager [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-vif-deleted-dc3ff6b9-4b12-45cf-b797-2d0daee5530a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1024.260776] env[62974]: DEBUG nova.compute.manager [req-6aa13e35-1e95-4298-a57b-ebc73c525905 req-37e768c3-cb7b-4eb5-a159-2c2aca6fa128 service nova] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Received event network-vif-deleted-f00dab5c-4be5-45af-a966-24a2317d5c0c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1024.410137] env[62974]: DEBUG nova.network.neutron [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Successfully updated port: daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.490519] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654927, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.533180] env[62974]: DEBUG nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1024.677942] env[62974]: DEBUG nova.network.neutron [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updated VIF entry in instance network info cache for port 3ee36563-83e1-498a-a5a3-81a8ff2ee417. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1024.678397] env[62974]: DEBUG nova.network.neutron [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updating instance_info_cache with network_info: [{"id": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "address": "fa:16:3e:b0:69:3b", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee36563-83", "ovs_interfaceid": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.702210] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528a8c58-38e8-1fdc-6950-ad2f469367c4, 'name': SearchDatastore_Task, 'duration_secs': 0.012789} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.702573] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.702839] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.703136] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.703315] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.703526] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.703826] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d42e9b2e-686c-46d6-b46f-6533540f85c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.715204] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.715412] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.716268] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcfaa06a-352a-47d0-9974-b41d4c1f16fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.723067] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1024.723067] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5233a2c9-a86b-d7de-6100-e5c84890dbc1" [ 1024.723067] env[62974]: _type = "Task" [ 1024.723067] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.732386] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5233a2c9-a86b-d7de-6100-e5c84890dbc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.913123] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.913394] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.913893] env[62974]: DEBUG nova.network.neutron [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.992351] env[62974]: DEBUG oslo_vmware.api [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654927, 'name': PowerOnVM_Task, 'duration_secs': 0.54367} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.992351] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1024.992351] env[62974]: INFO nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 1024.992351] env[62974]: DEBUG nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.992351] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235a02f4-3fb9-43b7-a82d-eb27536fecfa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.056563] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5df8c4-c425-48af-97c2-5593b1fbbeef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.061295] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.067698] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e6a8e6-eb09-4433-a6af-9daa6f60b3b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.102097] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4acfd2-816b-473c-b665-0e5dd3b023e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.110723] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f0f3bf-e932-4dcd-94b9-24bc6eecd362 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.125535] env[62974]: DEBUG nova.compute.provider_tree [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.181374] env[62974]: DEBUG oslo_concurrency.lockutils [req-8189110c-ed1c-48da-9134-6be70a734461 req-ed51bcb2-2b07-4c62-a915-71f4fbddf1aa service nova] Releasing lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.234506] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5233a2c9-a86b-d7de-6100-e5c84890dbc1, 'name': SearchDatastore_Task, 'duration_secs': 0.011669} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.235345] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1456b38c-4b48-4f03-b25d-6db237b0ddc6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.241847] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1025.241847] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5293a0c9-9fee-24a5-fee3-a8487cd343b3" [ 1025.241847] env[62974]: _type = "Task" [ 1025.241847] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.250727] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5293a0c9-9fee-24a5-fee3-a8487cd343b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.446327] env[62974]: DEBUG nova.network.neutron [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.514782] env[62974]: INFO nova.compute.manager [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Took 16.04 seconds to build instance. 
[ 1025.584829] env[62974]: DEBUG nova.network.neutron [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.628804] env[62974]: DEBUG nova.scheduler.client.report [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.753482] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5293a0c9-9fee-24a5-fee3-a8487cd343b3, 'name': SearchDatastore_Task, 'duration_secs': 0.016172} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.753816] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.754359] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a44cca2f-9286-490a-9013-1fea30984fa5/a44cca2f-9286-490a-9013-1fea30984fa5.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.754359] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea1d6468-f3c2-429a-8c89-ac22268ef7da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.763079] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1025.763079] env[62974]: value = "task-2654929" [ 1025.763079] env[62974]: _type = "Task" [ 1025.763079] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.772635] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654929, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.018729] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4a5b44e5-465c-4cb5-a8d1-f53470a3b2fa tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.551s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.087888] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.088714] env[62974]: DEBUG nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Instance network_info: |[{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1026.089165] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:15:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7f41333-42ee-47f3-936c-d6701ab786d2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'daa4e7d6-34e5-4455-b28f-6ee056ef2e93', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.110363] env[62974]: DEBUG oslo.service.loopingcall [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.110748] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.111387] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-000a1707-04fc-41e1-8f29-f42d7fef075e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.144833] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.145724] env[62974]: DEBUG nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1026.151743] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.091s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.153259] env[62974]: INFO nova.compute.claims [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.157924] env[62974]: DEBUG nova.compute.manager [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Received event network-changed-298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1026.157924] env[62974]: DEBUG nova.compute.manager [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Refreshing instance network info cache due to event network-changed-298eccf6-2f42-4f6e-99da-2695849a3163. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1026.158167] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Acquiring lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.158286] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Acquired lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.158584] env[62974]: DEBUG nova.network.neutron [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Refreshing network info cache for port 298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.167338] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.167338] env[62974]: value = "task-2654930" [ 1026.167338] env[62974]: _type = "Task" [ 1026.167338] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.181994] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654930, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.275204] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654929, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.662217] env[62974]: DEBUG nova.compute.utils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1026.668275] env[62974]: DEBUG nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1026.668396] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1026.682268] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654930, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.727117] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.727362] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.748274] env[62974]: DEBUG nova.policy [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8a9d68e68144844ad4b6c02916f3e9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6453d2c53e34f6da5e0bf34d846e663', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1026.774583] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.819373} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.774850] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] a44cca2f-9286-490a-9013-1fea30984fa5/a44cca2f-9286-490a-9013-1fea30984fa5.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.775064] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.775328] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-419ebe9f-c358-4da9-9149-e456e7b56126 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.782893] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1026.782893] env[62974]: value = "task-2654931" [ 1026.782893] env[62974]: _type = "Task" [ 1026.782893] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.793535] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654931, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.045335] env[62974]: DEBUG nova.network.neutron [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updated VIF entry in instance network info cache for port 298eccf6-2f42-4f6e-99da-2695849a3163. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.045737] env[62974]: DEBUG nova.network.neutron [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updating instance_info_cache with network_info: [{"id": "298eccf6-2f42-4f6e-99da-2695849a3163", "address": "fa:16:3e:8a:1a:18", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap298eccf6-2f", "ovs_interfaceid": "298eccf6-2f42-4f6e-99da-2695849a3163", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.169409] env[62974]: DEBUG nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1027.195738] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654930, 'name': CreateVM_Task, 'duration_secs': 0.687554} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.196946] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Successfully created port: ec393463-0f9a-4e87-ac77-0f8294550bfd {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1027.198908] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.199877] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.200170] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.200687] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1027.205986] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5397008-c9a0-4ddb-80fa-e85dd245a8a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.214948] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1027.214948] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5278b2db-f0ff-5a1e-58c2-b5f14682c10f" [ 1027.214948] env[62974]: _type = "Task" [ 1027.214948] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.227270] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5278b2db-f0ff-5a1e-58c2-b5f14682c10f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.230401] env[62974]: DEBUG nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1027.294577] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.231079} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.295558] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.295703] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fcb359-ba21-4dc2-aa17-28bc5ccd0f97 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.322802] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] a44cca2f-9286-490a-9013-1fea30984fa5/a44cca2f-9286-490a-9013-1fea30984fa5.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.328415] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-537da45d-2fba-4a85-8632-5d975078fe27 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.343898] env[62974]: DEBUG nova.compute.manager [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1027.354604] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1027.354604] env[62974]: value = "task-2654933" [ 1027.354604] env[62974]: _type = "Task" [ 1027.354604] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.366963] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654933, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.475239] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b442d9d6-6fb8-468d-8742-56703c3b584f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.478643] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Successfully created port: 04938e84-a6a7-41b3-8f7d-f5f881420f7a {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1027.485853] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5dd6c1-2059-4dd3-9ed5-33d53f844f02 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.520123] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939c9ac2-a083-47df-aa18-980e494eac49 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.528395] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d61891-0042-4e0d-ac05-0b9ec2c0f9f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.545535] env[62974]: DEBUG nova.compute.provider_tree [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1027.549075] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Releasing lock "refresh_cache-4de11643-da0a-453f-b03e-ca19819f4f06" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.549075] env[62974]: DEBUG nova.compute.manager [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Received event network-vif-plugged-daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1027.549264] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.549345] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] 
Lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.549503] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.549714] env[62974]: DEBUG nova.compute.manager [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] No waiting events found dispatching network-vif-plugged-daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1027.549955] env[62974]: WARNING nova.compute.manager [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Received unexpected event network-vif-plugged-daa4e7d6-34e5-4455-b28f-6ee056ef2e93 for instance with vm_state building and task_state spawning. [ 1027.550146] env[62974]: DEBUG nova.compute.manager [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Received event network-changed-daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1027.550333] env[62974]: DEBUG nova.compute.manager [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Refreshing instance network info cache due to event network-changed-daa4e7d6-34e5-4455-b28f-6ee056ef2e93. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1027.551151] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Acquiring lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.551151] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Acquired lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.551151] env[62974]: DEBUG nova.network.neutron [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Refreshing network info cache for port daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.728329] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5278b2db-f0ff-5a1e-58c2-b5f14682c10f, 'name': SearchDatastore_Task, 'duration_secs': 0.020051} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.728744] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.729122] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.729422] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.729677] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.729938] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.730228] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ea883b7-9c84-48a4-b7af-7c95144d8767 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.743220] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.743468] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.744267] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92c2cef6-6578-40d0-87be-d4590132a087 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.751841] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1027.751841] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224ea1d-2ff8-6fa7-0106-972361283001" [ 1027.751841] env[62974]: _type = "Task" [ 1027.751841] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.753016] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.765717] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224ea1d-2ff8-6fa7-0106-972361283001, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.862188] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.865781] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654933, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.069092] env[62974]: ERROR nova.scheduler.client.report [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [req-c4569823-95d3-4e9c-bd13-e32645835115] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c4569823-95d3-4e9c-bd13-e32645835115"}]} [ 1028.083721] env[62974]: DEBUG nova.scheduler.client.report [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1028.098029] env[62974]: DEBUG nova.scheduler.client.report [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1028.098029] env[62974]: DEBUG nova.compute.provider_tree [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.110625] env[62974]: DEBUG nova.scheduler.client.report [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1028.133732] env[62974]: DEBUG nova.scheduler.client.report [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1028.185879] env[62974]: DEBUG nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1028.219230] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1028.219571] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.219763] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1028.219952] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.220578] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1028.220995] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1028.221287] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1028.221523] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1028.221653] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 
tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1028.221836] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1028.222179] env[62974]: DEBUG nova.virt.hardware [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1028.222937] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f9e4e9-6749-4476-b98f-e9b8acf92bd9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.235340] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbc8ac1-f855-4f0d-b1e0-14b604eab328 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.267932] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224ea1d-2ff8-6fa7-0106-972361283001, 'name': SearchDatastore_Task, 'duration_secs': 0.024727} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.268985] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3beaf19-36a5-46c1-bc51-ce142bd9ee7e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.275889] env[62974]: DEBUG nova.network.neutron [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updated VIF entry in instance network info cache for port daa4e7d6-34e5-4455-b28f-6ee056ef2e93. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.276264] env[62974]: DEBUG nova.network.neutron [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.278639] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1028.278639] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52669060-693a-ebdb-ba49-5734500bf314" [ 1028.278639] env[62974]: _type = "Task" [ 1028.278639] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.288062] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52669060-693a-ebdb-ba49-5734500bf314, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.351555] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bac632-ee0f-48be-b388-03a86bf7bccb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.367069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70b775c-4bcd-4a79-aa4b-9a4a6ebea7e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.370299] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654933, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.398981] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5658457-eaf4-47a6-82ba-8069ca51abed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.407184] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe96e91-42b9-4128-a6b8-1af8a3e08c0d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.422663] env[62974]: DEBUG nova.compute.provider_tree [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.779989] env[62974]: DEBUG oslo_concurrency.lockutils [req-ddc7d5de-e125-41ec-9e44-24d72956cd8d req-b5237306-d883-47b3-99bc-8a48db508a3b service nova] Releasing lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.791461] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52669060-693a-ebdb-ba49-5734500bf314, 'name': SearchDatastore_Task, 'duration_secs': 0.02786} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.791789] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.792104] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.792403] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a924777d-7216-43d1-8375-be78992e0ae4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.801314] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1028.801314] env[62974]: value = "task-2654934" [ 1028.801314] env[62974]: _type = "Task" [ 1028.801314] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.811376] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.866351] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654933, 'name': ReconfigVM_Task, 'duration_secs': 1.091035} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.866658] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfigured VM instance instance-00000065 to attach disk [datastore1] a44cca2f-9286-490a-9013-1fea30984fa5/a44cca2f-9286-490a-9013-1fea30984fa5.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.867289] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bd39a36-8d47-48a3-97ec-dcf547af8fcd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.875668] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1028.875668] env[62974]: value = "task-2654935" [ 1028.875668] env[62974]: _type = "Task" [ 1028.875668] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.885497] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654935, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.895687] env[62974]: DEBUG nova.compute.manager [req-07910cb7-fcad-452a-bd9c-65521ca2332b req-89f39e78-2bb1-4e1d-b3ef-2a361da5ccb7 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received event network-vif-plugged-ec393463-0f9a-4e87-ac77-0f8294550bfd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1028.896099] env[62974]: DEBUG oslo_concurrency.lockutils [req-07910cb7-fcad-452a-bd9c-65521ca2332b req-89f39e78-2bb1-4e1d-b3ef-2a361da5ccb7 service nova] Acquiring lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.896342] env[62974]: DEBUG oslo_concurrency.lockutils [req-07910cb7-fcad-452a-bd9c-65521ca2332b req-89f39e78-2bb1-4e1d-b3ef-2a361da5ccb7 service nova] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.896579] env[62974]: DEBUG oslo_concurrency.lockutils [req-07910cb7-fcad-452a-bd9c-65521ca2332b req-89f39e78-2bb1-4e1d-b3ef-2a361da5ccb7 service nova] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.896756] env[62974]: DEBUG nova.compute.manager [req-07910cb7-fcad-452a-bd9c-65521ca2332b req-89f39e78-2bb1-4e1d-b3ef-2a361da5ccb7 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] No waiting events found dispatching 
network-vif-plugged-ec393463-0f9a-4e87-ac77-0f8294550bfd {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1028.896925] env[62974]: WARNING nova.compute.manager [req-07910cb7-fcad-452a-bd9c-65521ca2332b req-89f39e78-2bb1-4e1d-b3ef-2a361da5ccb7 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received unexpected event network-vif-plugged-ec393463-0f9a-4e87-ac77-0f8294550bfd for instance with vm_state building and task_state spawning. [ 1028.953581] env[62974]: DEBUG nova.scheduler.client.report [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 136 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1028.953867] env[62974]: DEBUG nova.compute.provider_tree [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 136 to 137 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1028.954074] env[62974]: DEBUG nova.compute.provider_tree [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.982688] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Successfully updated port: ec393463-0f9a-4e87-ac77-0f8294550bfd {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1029.312206] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654934, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.386158] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654935, 'name': Rename_Task, 'duration_secs': 0.155105} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.386456] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.386935] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc78e98d-d3c6-48e5-a430-8a50c7e38b28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.393889] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1029.393889] env[62974]: value = "task-2654936" [ 1029.393889] env[62974]: _type = "Task" [ 1029.393889] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.401926] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654936, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.459671] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.308s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.460251] env[62974]: DEBUG nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1029.463507] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.711s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.465243] env[62974]: INFO nova.compute.claims [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.812702] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548708} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.812989] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1029.813230] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.813495] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59cb6159-b87d-443e-920e-b5db044cba47 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.820802] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1029.820802] env[62974]: value = "task-2654938" [ 1029.820802] env[62974]: _type = "Task" [ 1029.820802] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.829674] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654938, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.904587] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654936, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.969768] env[62974]: DEBUG nova.compute.utils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1029.973372] env[62974]: DEBUG nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1029.973543] env[62974]: DEBUG nova.network.neutron [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1030.012640] env[62974]: DEBUG nova.policy [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82cbd050443849dba65c7c3ccd578590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21909beb1faa4a2c994925764408480f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1030.265898] env[62974]: DEBUG nova.network.neutron [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Successfully created port: f0ed519a-7e15-4b09-9180-2b0c2abe1052 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1030.335193] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071634} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.335193] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.335193] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2e736e-ec83-4677-bebf-141c7226caf8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.359384] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.359761] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-673326ee-f77e-4d46-a28c-529c88f9430f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.380109] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1030.380109] env[62974]: value = "task-2654939" [ 1030.380109] env[62974]: _type = "Task" [ 1030.380109] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.388883] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654939, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.403974] env[62974]: DEBUG oslo_vmware.api [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2654936, 'name': PowerOnVM_Task, 'duration_secs': 0.81661} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.404352] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1030.404603] env[62974]: INFO nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Took 8.87 seconds to spawn the instance on the hypervisor. 
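Note: the spawn recorded above is driven by a chain of vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each of which the driver waits on via the oslo.vmware wait_for_task/_poll_task calls that produce the repeated "progress is N%" DEBUG lines. The following is only a minimal sketch of that polling loop, not the oslo.vmware implementation itself; get_task_info() is a hypothetical stand-in for the PropertyCollector reads the real library performs, and the field names on the returned object are assumptions.

    import time

    def get_task_info(session, task_ref):
        # Hypothetical helper: in the real driver this is a PropertyCollector
        # read of the task's TaskInfo; assumed to expose .state, .progress,
        # .error and .result.
        raise NotImplementedError

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll a vCenter task until it finishes, mirroring the pattern in the
        # log: each poll reports the task name and progress, and the call
        # only returns once the task reaches the 'success' state.
        while True:
            info = get_task_info(session, task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError('Task %s failed: %s' % (task_ref, info.error))
            # Still 'queued' or 'running': log progress and retry, like the
            # "Task: {...} progress is N%" entries above.
            print('Task %s progress is %s%%' % (task_ref, info.progress or 0))
            time.sleep(poll_interval)
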
[ 1030.404789] env[62974]: DEBUG nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.405594] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8c64c0-f523-43ef-a21f-64e575d76d90 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.474505] env[62974]: DEBUG nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1030.670887] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d762651-4c2b-4fa3-8071-0cd9d64ec6a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.679767] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d733393a-d8c9-4431-9112-9744fbc9f303 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.712663] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c041dec2-c152-4697-a9fe-91ca3bd58697 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.720598] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e71c5a-bf36-400d-8366-642f8911950a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.735536] env[62974]: DEBUG nova.compute.provider_tree [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.890748] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654939, 'name': ReconfigVM_Task, 'duration_secs': 0.366065} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.891267] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.891917] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c841aad-b0b5-457e-bcb4-6ec8aaeb435c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.899608] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1030.899608] env[62974]: value = "task-2654940" [ 1030.899608] env[62974]: _type = "Task" [ 1030.899608] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.907904] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654940, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.923732] env[62974]: INFO nova.compute.manager [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Took 18.97 seconds to build instance. [ 1031.153060] env[62974]: DEBUG nova.compute.manager [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received event network-changed-ec393463-0f9a-4e87-ac77-0f8294550bfd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.153269] env[62974]: DEBUG nova.compute.manager [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Refreshing instance network info cache due to event network-changed-ec393463-0f9a-4e87-ac77-0f8294550bfd. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1031.153494] env[62974]: DEBUG oslo_concurrency.lockutils [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] Acquiring lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.153635] env[62974]: DEBUG oslo_concurrency.lockutils [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] Acquired lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.153797] env[62974]: DEBUG nova.network.neutron [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Refreshing network info cache for port ec393463-0f9a-4e87-ac77-0f8294550bfd {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.185546] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Successfully updated port: 04938e84-a6a7-41b3-8f7d-f5f881420f7a {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1031.238776] env[62974]: DEBUG nova.scheduler.client.report [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1031.412568] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654940, 'name': Rename_Task, 'duration_secs': 0.167854} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.412919] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1031.413241] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b151dfc0-8e53-4190-9cd7-1cc9e8c56b3a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.421431] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1031.421431] env[62974]: value = "task-2654941" [ 1031.421431] env[62974]: _type = "Task" [ 1031.421431] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.426298] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b0333015-1760-464a-95bb-0376165e5ea1 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.481s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.432655] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654941, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.488408] env[62974]: DEBUG nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1031.524923] env[62974]: DEBUG nova.compute.manager [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Received event network-changed-3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.525131] env[62974]: DEBUG nova.compute.manager [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Refreshing instance network info cache due to event network-changed-3ee36563-83e1-498a-a5a3-81a8ff2ee417. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1031.525346] env[62974]: DEBUG oslo_concurrency.lockutils [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] Acquiring lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.525484] env[62974]: DEBUG oslo_concurrency.lockutils [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] Acquired lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.525650] env[62974]: DEBUG nova.network.neutron [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Refreshing network info cache for port 3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.529714] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1031.530106] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.531537] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1031.531537] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.531537] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1031.531537] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 
tempest-ServerRescueTestJSON-805941023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1031.531537] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1031.531917] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1031.532341] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1031.532412] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1031.532680] env[62974]: DEBUG nova.virt.hardware [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1031.534052] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b30b6e-4a02-4e22-968c-c8ae084fdcac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.546136] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46d4c4b-976a-4d61-8897-0b8d5d6f07b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.691095] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.708563] env[62974]: DEBUG nova.network.neutron [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Instance cache missing network info. 
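The nova.virt.hardware records above trace topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536, every sockets x cores x threads factorization of the vCPU count is enumerated, and for one vCPU the only candidate is 1:1:1. An illustrative re-implementation of that enumeration (not Nova's actual code) which reproduces the logged result:

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'cores sockets threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every sockets*cores*threads factorization of vcpus that
    fits within the given limits."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(cores, sockets, threads)


print(list(possible_topologies(1)))
# [VirtCPUTopology(cores=1, sockets=1, threads=1)], matching the
# 'Possible topologies' record above.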
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.744956] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.745553] env[62974]: DEBUG nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1031.749013] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.887s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.863166] env[62974]: DEBUG nova.network.neutron [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.931632] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654941, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.988979] env[62974]: DEBUG nova.network.neutron [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Successfully updated port: f0ed519a-7e15-4b09-9180-2b0c2abe1052 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.252886] env[62974]: DEBUG nova.compute.utils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1032.260135] env[62974]: INFO nova.compute.claims [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.267798] env[62974]: DEBUG nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1032.267798] env[62974]: DEBUG nova.network.neutron [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1032.273821] env[62974]: DEBUG nova.network.neutron [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updated VIF entry in instance network info cache for port 3ee36563-83e1-498a-a5a3-81a8ff2ee417. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.273821] env[62974]: DEBUG nova.network.neutron [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updating instance_info_cache with network_info: [{"id": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "address": "fa:16:3e:b0:69:3b", "network": {"id": "af586413-ffb2-476a-9335-03d2b25beeb5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-804505901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4e071d2e6ef4b928dd40ea5b8f81fe6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee36563-83", "ovs_interfaceid": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.304371] env[62974]: DEBUG nova.policy [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1032.365479] env[62974]: DEBUG oslo_concurrency.lockutils [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] Releasing lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.365803] env[62974]: DEBUG 
nova.compute.manager [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received event network-vif-plugged-04938e84-a6a7-41b3-8f7d-f5f881420f7a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1032.366042] env[62974]: DEBUG oslo_concurrency.lockutils [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] Acquiring lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.366264] env[62974]: DEBUG oslo_concurrency.lockutils [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.366430] env[62974]: DEBUG oslo_concurrency.lockutils [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.366624] env[62974]: DEBUG nova.compute.manager [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] No waiting events found dispatching network-vif-plugged-04938e84-a6a7-41b3-8f7d-f5f881420f7a {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.366853] env[62974]: WARNING nova.compute.manager [req-9aa8f72a-a333-44f0-a777-b34aad79bbc2 req-a9f28157-0728-43b7-8453-69bce677fd23 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received unexpected event network-vif-plugged-04938e84-a6a7-41b3-8f7d-f5f881420f7a for instance with vm_state building and task_state spawning. [ 1032.367306] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.367467] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.433086] env[62974]: DEBUG oslo_vmware.api [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2654941, 'name': PowerOnVM_Task, 'duration_secs': 0.686819} completed successfully. 
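Most of the 'Acquiring lock ... acquired ... released' records in this stretch come from oslo.concurrency in-process locks: Nova wraps sections such as _pop_event, instance_claim and do_terminate_instance so that concurrent requests against the same resource run one at a time. A compact sketch of the two usual forms, decorator and context manager; tracker.claim() and do_cleanup() are hypothetical placeholders:

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(tracker, instance):
    # Only one claim/resize path runs at a time, as in the
    # ResourceTracker.instance_claim / resize_claim records.
    return tracker.claim(instance)


def terminate(instance_uuid, do_cleanup):
    # Context-manager form, keyed per instance like the
    # do_terminate_instance lock records.
    with lockutils.lock(instance_uuid):
        do_cleanup(instance_uuid)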
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.433377] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.433579] env[62974]: INFO nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Took 8.55 seconds to spawn the instance on the hypervisor. [ 1032.433774] env[62974]: DEBUG nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1032.434654] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14bcff87-7716-46fa-aa9d-e4e9374317a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.491490] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.491490] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.491623] env[62974]: DEBUG nova.network.neutron [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.530747] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquiring lock "7163e48f-8344-4837-bbfd-cbb5741eee5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.530877] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.531179] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 
tempest-ServersTestJSON-204793063-project-member] Acquiring lock "7163e48f-8344-4837-bbfd-cbb5741eee5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.531399] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.531578] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.534617] env[62974]: INFO nova.compute.manager [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Terminating instance [ 1032.600253] env[62974]: DEBUG nova.network.neutron [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Successfully created port: 4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.770468] env[62974]: DEBUG nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1032.775022] env[62974]: INFO nova.compute.resource_tracker [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating resource usage from migration 9ff5649a-0b47-462b-99bb-9611fad0581f [ 1032.782921] env[62974]: DEBUG oslo_concurrency.lockutils [req-e04a148f-c777-4738-8691-20a5f428e05a req-8ef0979c-1967-4f6e-b616-c14f17817660 service nova] Releasing lock "refresh_cache-a44cca2f-9286-490a-9013-1fea30984fa5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.930247] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.964670] env[62974]: INFO nova.compute.manager [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Took 17.95 seconds to build instance. [ 1033.039104] env[62974]: DEBUG nova.compute.manager [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1033.039347] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.040680] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686e930f-f74d-48dc-b7d9-9d8ac27a9a76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.053300] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.053575] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58c580a8-6cea-4329-96ac-26ffed55b265 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.058633] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d00ea3f-1dce-4606-a273-aae3f451b2ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.064247] env[62974]: DEBUG oslo_vmware.api [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 1033.064247] env[62974]: value = "task-2654942" [ 1033.064247] env[62974]: _type = "Task" [ 1033.064247] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.071081] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df2d72c-8caf-4dad-b787-5d7eea37496a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.077413] env[62974]: DEBUG oslo_vmware.api [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654942, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.078289] env[62974]: DEBUG nova.network.neutron [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.115172] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ac7446-1ed2-4022-9696-e463b4b36cf4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.124380] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05723d7-712e-4fbf-83cf-7310fa2830f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.139275] env[62974]: DEBUG nova.compute.provider_tree [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.204198] env[62974]: DEBUG nova.compute.manager [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received event network-changed-04938e84-a6a7-41b3-8f7d-f5f881420f7a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1033.204464] env[62974]: DEBUG nova.compute.manager [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Refreshing instance network info cache due to event network-changed-04938e84-a6a7-41b3-8f7d-f5f881420f7a. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1033.204724] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Acquiring lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.372688] env[62974]: DEBUG nova.network.neutron [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Updating instance_info_cache with network_info: [{"id": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "address": "fa:16:3e:07:aa:39", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ed519a-7e", "ovs_interfaceid": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.467705] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f3273f79-e2da-40cd-8736-7d2f57d5c087 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.464s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.503675] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.503872] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.549369] env[62974]: DEBUG nova.network.neutron [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 
tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Updating instance_info_cache with network_info: [{"id": "ec393463-0f9a-4e87-ac77-0f8294550bfd", "address": "fa:16:3e:9d:dd:ec", "network": {"id": "387ca993-767d-4059-93c4-80c0903c8205", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-316457913", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "225b6979-9329-403b-91fa-138bd41f6e83", "external-id": "nsx-vlan-transportzone-38", "segmentation_id": 38, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec393463-0f", "ovs_interfaceid": "ec393463-0f9a-4e87-ac77-0f8294550bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "04938e84-a6a7-41b3-8f7d-f5f881420f7a", "address": "fa:16:3e:58:d9:5e", "network": {"id": "2e3e97e2-c78c-4c6d-b4cd-cc864fbf864a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1017062769", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04938e84-a6", "ovs_interfaceid": "04938e84-a6a7-41b3-8f7d-f5f881420f7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.575496] env[62974]: DEBUG oslo_vmware.api [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654942, 'name': PowerOffVM_Task, 'duration_secs': 0.184496} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.575774] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.575946] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1033.576219] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a25c3cfc-a59e-41b8-9e13-e4502862c9fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.643054] env[62974]: DEBUG nova.scheduler.client.report [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.647389] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1033.647629] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1033.647934] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Deleting the datastore file [datastore1] 7163e48f-8344-4837-bbfd-cbb5741eee5d {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1033.648462] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01f7cbef-41ff-4a03-8b14-481a271c09f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.656253] env[62974]: DEBUG oslo_vmware.api [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for the task: (returnval){ [ 1033.656253] env[62974]: value = "task-2654944" [ 1033.656253] env[62974]: _type = "Task" [ 1033.656253] env[62974]: } to complete. 
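The destroy path for instance 7163e48f-8344-4837-bbfd-cbb5741eee5d is spread over the records above: power the VM off, unregister it from vCenter, then delete its datastore directory. A condensed sketch of that sequence, using the same session pattern as earlier; vm_ref, dc_ref and ds_path stand in for values Nova resolves elsewhere, and error handling is omitted:

def destroy_backing(session, vm_ref, dc_ref, ds_path):
    # PowerOffVM_Task: stop the guest before unregistering it.
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))

    # UnregisterVM removes the VM from the vCenter inventory; it is a
    # plain call rather than a Task, so the log shows no polling for it.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # DeleteDatastoreFile_Task: drop the instance directory, e.g.
    # '[datastore1] 7163e48f-8344-4837-bbfd-cbb5741eee5d'.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)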
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.667582] env[62974]: DEBUG oslo_vmware.api [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.790216] env[62974]: DEBUG nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1033.821135] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.821428] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.821586] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.821767] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.821916] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.822121] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1033.822354] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.822517] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.822681] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.822840] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.823011] env[62974]: DEBUG nova.virt.hardware [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.823881] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b44517-aaef-4d3c-9084-00736e9d5ed0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.832889] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af36688-bb64-4e81-9c6f-350d0ecb1fb8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.877728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.878161] env[62974]: DEBUG nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Instance network_info: |[{"id": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "address": "fa:16:3e:07:aa:39", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": 
"21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ed519a-7e", "ovs_interfaceid": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1033.878566] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:aa:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0ed519a-7e15-4b09-9180-2b0c2abe1052', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.886598] env[62974]: DEBUG oslo.service.loopingcall [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.886776] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.886964] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7851872-98a3-45be-86ba-a15d93635c6b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.908418] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.908418] env[62974]: value = "task-2654945" [ 1033.908418] env[62974]: _type = "Task" [ 1033.908418] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.918083] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654945, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.006968] env[62974]: DEBUG nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1034.054041] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Releasing lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.054041] env[62974]: DEBUG nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Instance network_info: |[{"id": "ec393463-0f9a-4e87-ac77-0f8294550bfd", "address": "fa:16:3e:9d:dd:ec", "network": {"id": "387ca993-767d-4059-93c4-80c0903c8205", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-316457913", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "225b6979-9329-403b-91fa-138bd41f6e83", "external-id": "nsx-vlan-transportzone-38", "segmentation_id": 38, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec393463-0f", "ovs_interfaceid": "ec393463-0f9a-4e87-ac77-0f8294550bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "04938e84-a6a7-41b3-8f7d-f5f881420f7a", "address": "fa:16:3e:58:d9:5e", "network": {"id": "2e3e97e2-c78c-4c6d-b4cd-cc864fbf864a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1017062769", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04938e84-a6", "ovs_interfaceid": "04938e84-a6a7-41b3-8f7d-f5f881420f7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1034.054402] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Acquired lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.054498] env[62974]: DEBUG nova.network.neutron 
[req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Refreshing network info cache for port 04938e84-a6a7-41b3-8f7d-f5f881420f7a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.058342] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:dd:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '225b6979-9329-403b-91fa-138bd41f6e83', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec393463-0f9a-4e87-ac77-0f8294550bfd', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:d9:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15165046-2de9-4ada-9e99-0126e20854a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04938e84-a6a7-41b3-8f7d-f5f881420f7a', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.071575] env[62974]: DEBUG oslo.service.loopingcall [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.076191] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.077915] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bda39660-dd00-46c3-8b92-dec199b2f0d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.107728] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.107728] env[62974]: value = "task-2654946" [ 1034.107728] env[62974]: _type = "Task" [ 1034.107728] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.116895] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654946, 'name': CreateVM_Task} progress is 0%. 
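The 'Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return' records are emitted from oslo.service's loopingcall module, whose RetryDecorator re-invokes a call on selected transient exceptions and blocks the caller until it finally returns. A minimal, self-contained sketch of that decorator; FlakyError, flaky_call and the retry parameters are invented for the example:

from oslo_service import loopingcall


class FlakyError(Exception):
    pass


attempts = {'n': 0}


@loopingcall.RetryDecorator(max_retry_count=5, inc_sleep_time=0.1,
                            max_sleep_time=1, exceptions=(FlakyError,))
def flaky_call():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise FlakyError('transient failure')
    return 'vm-ref-placeholder'


print(flaky_call())  # succeeds on the third attempt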
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.149833] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.401s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.150214] env[62974]: INFO nova.compute.manager [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Migrating [ 1034.178874] env[62974]: DEBUG oslo_vmware.api [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Task: {'id': task-2654944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.497155} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.179582] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.179839] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.180106] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.180284] env[62974]: INFO nova.compute.manager [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1034.180559] env[62974]: DEBUG oslo.service.loopingcall [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.180750] env[62974]: DEBUG nova.compute.manager [-] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1034.180882] env[62974]: DEBUG nova.network.neutron [-] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.199086] env[62974]: DEBUG nova.network.neutron [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Successfully updated port: 4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.424310] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654945, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.476780] env[62974]: DEBUG nova.network.neutron [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Updated VIF entry in instance network info cache for port 04938e84-a6a7-41b3-8f7d-f5f881420f7a. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.476937] env[62974]: DEBUG nova.network.neutron [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Updating instance_info_cache with network_info: [{"id": "ec393463-0f9a-4e87-ac77-0f8294550bfd", "address": "fa:16:3e:9d:dd:ec", "network": {"id": "387ca993-767d-4059-93c4-80c0903c8205", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-316457913", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "225b6979-9329-403b-91fa-138bd41f6e83", "external-id": "nsx-vlan-transportzone-38", "segmentation_id": 38, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec393463-0f", "ovs_interfaceid": "ec393463-0f9a-4e87-ac77-0f8294550bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "04938e84-a6a7-41b3-8f7d-f5f881420f7a", "address": "fa:16:3e:58:d9:5e", "network": {"id": "2e3e97e2-c78c-4c6d-b4cd-cc864fbf864a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1017062769", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": 
"d6453d2c53e34f6da5e0bf34d846e663", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04938e84-a6", "ovs_interfaceid": "04938e84-a6a7-41b3-8f7d-f5f881420f7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.534865] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.534865] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.537354] env[62974]: INFO nova.compute.claims [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.619088] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654946, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.669417] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.669417] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.669417] env[62974]: DEBUG nova.network.neutron [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.702344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.703694] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.703694] env[62974]: DEBUG nova.network.neutron [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.921141] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654945, 'name': CreateVM_Task, 'duration_secs': 0.527374} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.921333] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.922054] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.922225] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.922556] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.922855] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f33eb86-8230-464c-9291-58d8dad693e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.930106] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1034.930106] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523e0c2b-3990-fcf8-3407-44677ad84b6d" [ 1034.930106] env[62974]: _type = "Task" [ 1034.930106] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.939425] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523e0c2b-3990-fcf8-3407-44677ad84b6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.980219] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Releasing lock "refresh_cache-8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.980512] env[62974]: DEBUG nova.compute.manager [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Received event network-vif-plugged-f0ed519a-7e15-4b09-9180-2b0c2abe1052 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1034.980714] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Acquiring lock "1aafddba-5da3-4c46-a537-3c178a1fec88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.980986] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.981176] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.981349] env[62974]: DEBUG nova.compute.manager [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] No waiting events found dispatching network-vif-plugged-f0ed519a-7e15-4b09-9180-2b0c2abe1052 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.981519] env[62974]: WARNING nova.compute.manager [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Received unexpected event network-vif-plugged-f0ed519a-7e15-4b09-9180-2b0c2abe1052 for instance with vm_state building and task_state spawning. [ 1034.981833] env[62974]: DEBUG nova.compute.manager [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Received event network-changed-f0ed519a-7e15-4b09-9180-2b0c2abe1052 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1034.981833] env[62974]: DEBUG nova.compute.manager [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Refreshing instance network info cache due to event network-changed-f0ed519a-7e15-4b09-9180-2b0c2abe1052. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1034.982084] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Acquiring lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.982243] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Acquired lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.982406] env[62974]: DEBUG nova.network.neutron [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Refreshing network info cache for port f0ed519a-7e15-4b09-9180-2b0c2abe1052 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.119505] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654946, 'name': CreateVM_Task, 'duration_secs': 0.599547} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.120225] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.121012] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.244183] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Received event network-changed-daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1035.244436] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Refreshing instance network info cache due to event network-changed-daa4e7d6-34e5-4455-b28f-6ee056ef2e93. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1035.244696] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Acquiring lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.244848] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Acquired lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.245016] env[62974]: DEBUG nova.network.neutron [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Refreshing network info cache for port daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.253686] env[62974]: DEBUG nova.network.neutron [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.306323] env[62974]: DEBUG nova.network.neutron [-] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.364273] env[62974]: INFO nova.compute.manager [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Rebuilding instance [ 1035.427123] env[62974]: DEBUG nova.compute.manager [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1035.430245] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9b10ab-bba9-496a-ba98-2b10fe0962bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.442655] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523e0c2b-3990-fcf8-3407-44677ad84b6d, 'name': SearchDatastore_Task, 'duration_secs': 0.012446} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.444216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.444459] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.444691] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.444840] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.445028] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.447769] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.448086] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1035.448304] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f8dba6d-ce4a-410c-9ec7-99ddad75f3aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.450313] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f4695c1-0d63-4002-90e5-c84fc6650e3c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.456763] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 
tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1035.456763] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f056cd-7ba4-d40a-461d-37222c34396c" [ 1035.456763] env[62974]: _type = "Task" [ 1035.456763] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.461142] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.461329] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.465134] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66553aab-5671-4179-87d8-2da64b6af47e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.467239] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f056cd-7ba4-d40a-461d-37222c34396c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.471157] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1035.471157] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523651f5-890e-1290-4c52-e447b58d43b4" [ 1035.471157] env[62974]: _type = "Task" [ 1035.471157] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.479940] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523651f5-890e-1290-4c52-e447b58d43b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.487449] env[62974]: DEBUG nova.network.neutron [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance_info_cache with network_info: [{"id": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "address": "fa:16:3e:5c:65:4f", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42aa0aae-99", "ovs_interfaceid": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.489448] env[62974]: DEBUG nova.network.neutron [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.661675] env[62974]: DEBUG nova.network.neutron [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Updated VIF entry in instance network info cache for port f0ed519a-7e15-4b09-9180-2b0c2abe1052. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.662071] env[62974]: DEBUG nova.network.neutron [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Updating instance_info_cache with network_info: [{"id": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "address": "fa:16:3e:07:aa:39", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ed519a-7e", "ovs_interfaceid": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.725580] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae1579a-59e7-4a09-8cf4-8c803924022c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.734268] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d462cf-15ee-4b31-9d00-07c97bbc836f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.767263] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05a83ae-04cd-4ef8-b71d-f624ba9e4fd8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.776124] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6317ed3-c008-45b3-94d4-b2a84b89d9ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.792471] env[62974]: DEBUG nova.compute.provider_tree [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.810131] env[62974]: INFO nova.compute.manager [-] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Took 1.63 seconds to deallocate network for instance. 
[ 1035.969248] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f056cd-7ba4-d40a-461d-37222c34396c, 'name': SearchDatastore_Task, 'duration_secs': 0.019393} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.970076] env[62974]: DEBUG nova.network.neutron [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updated VIF entry in instance network info cache for port daa4e7d6-34e5-4455-b28f-6ee056ef2e93. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.970415] env[62974]: DEBUG nova.network.neutron [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.972632] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.972869] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.973093] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.983203] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523651f5-890e-1290-4c52-e447b58d43b4, 'name': SearchDatastore_Task, 'duration_secs': 0.026341} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.984200] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aca1beb8-1469-407b-be49-21487e153488 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.990295] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1035.990295] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bbfe0d-a801-212d-1ddb-9dbc6485d289" [ 1035.990295] env[62974]: _type = "Task" [ 1035.990295] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.993836] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.995533] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.995924] env[62974]: DEBUG nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Instance network_info: |[{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1035.996348] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:29:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d417e22-6d84-4f85-9504-ae36562bc03e', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.004220] env[62974]: DEBUG oslo.service.loopingcall [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.005056] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.005592] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93482f50-fbad-4f42-96df-3756ac83d4ea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.023854] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bbfe0d-a801-212d-1ddb-9dbc6485d289, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.033907] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.033907] env[62974]: value = "task-2654947" [ 1036.033907] env[62974]: _type = "Task" [ 1036.033907] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.048275] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654947, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.164755] env[62974]: DEBUG oslo_concurrency.lockutils [req-06fa2a6e-f396-4f7f-81ee-28ce7dc2c1e1 req-996f2674-a32d-416c-bcf6-97a515d2cd69 service nova] Releasing lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.296089] env[62974]: DEBUG nova.scheduler.client.report [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.314930] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.455373] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.455751] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1313c3cf-8185-4b7d-9334-55ca1ad92bbc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.467431] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1036.467431] env[62974]: value = "task-2654948" [ 1036.467431] env[62974]: _type = "Task" [ 1036.467431] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.474239] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Releasing lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.474651] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-vif-plugged-4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1036.474900] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.475155] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.475327] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.475547] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] No waiting events found dispatching network-vif-plugged-4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1036.475843] env[62974]: WARNING nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received unexpected event network-vif-plugged-4d417e22-6d84-4f85-9504-ae36562bc03e for instance with vm_state building and task_state spawning. [ 1036.476228] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-changed-4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1036.476462] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing instance network info cache due to event network-changed-4d417e22-6d84-4f85-9504-ae36562bc03e. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1036.476707] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.476863] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.477085] env[62974]: DEBUG nova.network.neutron [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing network info cache for port 4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.481903] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.506994] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bbfe0d-a801-212d-1ddb-9dbc6485d289, 'name': SearchDatastore_Task, 'duration_secs': 0.038294} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.507301] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.507696] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/1aafddba-5da3-4c46-a537-3c178a1fec88.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.507836] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.508038] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.508260] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d89d2fa-c652-4d87-8f6b-41feb8f386c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.510697] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13226d0c-5970-400a-8cb8-ba35bef292a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.520129] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1036.520129] env[62974]: value = "task-2654949" [ 1036.520129] env[62974]: _type = "Task" [ 1036.520129] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.526326] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.526615] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.530406] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-965974be-8168-450f-b5aa-623723a43063 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.532974] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.538044] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1036.538044] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523de9e1-565a-70b5-6dfe-cdd16b62a3c2" [ 1036.538044] env[62974]: _type = "Task" [ 1036.538044] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.548304] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654947, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.554801] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523de9e1-565a-70b5-6dfe-cdd16b62a3c2, 'name': SearchDatastore_Task, 'duration_secs': 0.01162} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.555629] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ed1760b-8313-46fb-8276-2f1c3d6f7da9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.561747] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1036.561747] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e2a1a5-6e1d-3c19-dbd4-d2787990c4b2" [ 1036.561747] env[62974]: _type = "Task" [ 1036.561747] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.570284] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e2a1a5-6e1d-3c19-dbd4-d2787990c4b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.801756] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.802227] env[62974]: DEBUG nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1036.805858] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.491s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.806074] env[62974]: DEBUG nova.objects.instance [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lazy-loading 'resources' on Instance uuid 7163e48f-8344-4837-bbfd-cbb5741eee5d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.979668] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654948, 'name': PowerOffVM_Task, 'duration_secs': 0.198838} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.980047] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.980735] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.981311] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f743500-04d3-4a90-b657-40df286f5c0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.994892] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.995180] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8684085f-97ab-42f4-ba57-35605994a814 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.032836] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654949, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.045606] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654947, 'name': CreateVM_Task, 'duration_secs': 0.862904} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.045776] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.046512] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.046731] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.047171] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1037.047608] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-650f81ac-9b62-4eca-b84f-9fb95fab98af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.054280] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1037.054280] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5202a118-1e69-a278-1705-59ebd22e3dc7" [ 1037.054280] env[62974]: _type = "Task" [ 1037.054280] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.070266] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5202a118-1e69-a278-1705-59ebd22e3dc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.080023] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e2a1a5-6e1d-3c19-dbd4-d2787990c4b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010151} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.080023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.080264] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8/8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.080461] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5307fc1-b1b9-4ddd-ba72-865fc789fefe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.088960] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.089171] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.089349] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleting the datastore file [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.089649] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cc7485b-c799-456c-898a-9e85bc5da9be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.095383] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1037.095383] env[62974]: value = "task-2654951" [ 1037.095383] env[62974]: _type = "Task" [ 1037.095383] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.101074] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1037.101074] env[62974]: value = "task-2654952" [ 1037.101074] env[62974]: _type = "Task" [ 1037.101074] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.108811] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654951, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.115076] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.228242] env[62974]: DEBUG nova.network.neutron [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updated VIF entry in instance network info cache for port 4d417e22-6d84-4f85-9504-ae36562bc03e. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1037.228609] env[62974]: DEBUG nova.network.neutron [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.308802] env[62974]: DEBUG nova.compute.utils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1037.313315] env[62974]: DEBUG nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1037.313483] env[62974]: DEBUG nova.network.neutron [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.354532] env[62974]: DEBUG nova.policy [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49d8e3a243d346e8969ba6f325e7787e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9087d01b1ad748e0a66474953dfe7034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.510987] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1767b01e-9063-4c7c-86f0-09a9d4f7c3f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.532838] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance '713b503e-43b5-409c-8086-e6d36850f962' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1037.553755] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.725921} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.554232] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/1aafddba-5da3-4c46-a537-3c178a1fec88.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.554594] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.555063] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63f509be-3dae-4980-b63f-e594b7cede90 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.574328] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5202a118-1e69-a278-1705-59ebd22e3dc7, 'name': SearchDatastore_Task, 'duration_secs': 0.057718} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.576518] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.576873] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.577436] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.577436] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.577655] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.578265] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1037.578265] env[62974]: value = "task-2654953" [ 1037.578265] env[62974]: _type = "Task" [ 1037.578265] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.582124] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb180fb1-b8e4-4a14-85f7-d28e3044f84e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.604531] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654953, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.605329] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db841a3-4ed8-43ec-b26c-ff11083a0cae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.615709] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.615709] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.619556] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63465fdc-20d8-4e32-8800-a5c8bef13adb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.621058] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654951, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.625488] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb4c243-8f7d-4f5a-8463-f065412532ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.633687] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1037.633687] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524c40ee-5aa0-8dec-f7a6-b6df30779972" [ 1037.633687] env[62974]: _type = "Task" [ 1037.633687] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.634013] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251064} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.635676] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.635676] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.635676] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.678219] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df99028a-6c70-4c91-9203-5144539b33f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.685316] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524c40ee-5aa0-8dec-f7a6-b6df30779972, 'name': SearchDatastore_Task, 'duration_secs': 0.036595} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.686968] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd4061ed-1023-4b22-bfba-d9d9ae4e2be3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.693990] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be30692-3300-4f54-96ad-3405003782cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.699965] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1037.699965] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5203defb-6591-4a12-3be6-6f454d8e07e8" [ 1037.699965] env[62974]: _type = "Task" [ 1037.699965] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.718034] env[62974]: DEBUG nova.compute.provider_tree [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.723315] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5203defb-6591-4a12-3be6-6f454d8e07e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.730971] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.731253] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Received event network-vif-deleted-ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1037.731431] env[62974]: INFO nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Neutron deleted interface ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4; detaching it from the instance and deleting it from the info cache [ 1037.731595] env[62974]: DEBUG nova.network.neutron [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.734158] env[62974]: DEBUG nova.network.neutron [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Successfully created port: 5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.816124] env[62974]: DEBUG nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1038.040945] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.041331] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89d0ad29-288e-40ce-b9ce-468a37a4e606 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.052803] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1038.052803] env[62974]: value = "task-2654954" [ 1038.052803] env[62974]: _type = "Task" [ 1038.052803] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.062879] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.095294] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094422} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.095535] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.096438] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e61f0f-9432-478d-882c-3143ffa07e4f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.107970] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654951, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607103} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.117230] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8/8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.117501] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.127877] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/1aafddba-5da3-4c46-a537-3c178a1fec88.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.128309] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7ae683b-aa86-4b32-b6ea-d4d1206fd259 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.131462] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-321282cf-b73d-4c35-985e-df17b7d04c68 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.157228] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1038.157228] env[62974]: value = "task-2654955" [ 1038.157228] env[62974]: _type = "Task" [ 1038.157228] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.158754] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1038.158754] env[62974]: value = "task-2654956" [ 1038.158754] env[62974]: _type = "Task" [ 1038.158754] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.173655] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654955, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.177309] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654956, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.223349] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5203defb-6591-4a12-3be6-6f454d8e07e8, 'name': SearchDatastore_Task, 'duration_secs': 0.025737} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.224268] env[62974]: DEBUG nova.scheduler.client.report [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.227615] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.228022] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 72b0b643-7747-4dae-9d85-c8c6a573ce07/72b0b643-7747-4dae-9d85-c8c6a573ce07.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.228512] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad649c1f-0ea5-45b4-9ce2-ee4b67b6dc1d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.233993] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f938ec3-1290-46f7-8ecc-9479cb813997 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.240959] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1038.240959] env[62974]: value = "task-2654957" [ 1038.240959] env[62974]: _type = "Task" [ 1038.240959] env[62974]: } to 
complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.249822] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1dc454-6831-4e66-b79e-68b1972e70c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.264311] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654957, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.290697] env[62974]: DEBUG nova.compute.manager [req-e1dffcef-36eb-4993-adf8-db027e829b6f req-3c30164a-9702-4a9a-92b0-01fd0694ecca service nova] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Detach interface failed, port_id=ad0d2fde-0231-4109-bcc5-b8a8dbd58fe4, reason: Instance 7163e48f-8344-4837-bbfd-cbb5741eee5d could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1038.321282] env[62974]: INFO nova.virt.block_device [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Booting with volume cf84a2af-6e27-461e-9af2-0471881dd540 at /dev/sda [ 1038.365269] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e1b2a1b-6325-462a-a2da-80086e9532f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.377616] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb95398-cc02-4c4f-b173-ae21a1ef4a3a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.414952] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80c8144e-4cc5-4d9d-a74c-0044908f5b91 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.424692] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190541c5-2cc5-4aab-b777-0d38ffbdcc7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.462404] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7131e9d8-17f4-4228-9307-d3b53a4d3e41 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.470993] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec0031f-849e-4fe2-9604-50dd22ddf6e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.486539] env[62974]: DEBUG nova.virt.block_device [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating existing volume attachment record: 8fa2b82c-03cf-4d6a-8656-c8399f86899f {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 
1038.565558] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654954, 'name': PowerOffVM_Task, 'duration_secs': 0.368974} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.565872] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1038.566087] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance '713b503e-43b5-409c-8086-e6d36850f962' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1038.674646] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654955, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184208} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.678375] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.678900] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654956, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.681652] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1312a6-add3-44f5-a31f-3eaaca781076 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.724441] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8/8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.727472] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.727699] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.728300] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.728300] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.728300] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.728447] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1038.728649] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1038.728807] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.728975] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.729222] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.729473] env[62974]: DEBUG nova.virt.hardware [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.729763] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9a47cde-f4d7-41b8-8da2-c3203a176bba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.749047] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.752102] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5471b6e-155e-4907-9b3f-66f9ee975f2a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.771039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c252781-6091-4b00-9634-970f47a1afcb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.775497] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1038.775497] env[62974]: value = "task-2654958" [ 1038.775497] env[62974]: _type = "Task" [ 1038.775497] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.775823] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654957, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.780589] env[62974]: INFO nova.scheduler.client.report [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Deleted allocations for instance 7163e48f-8344-4837-bbfd-cbb5741eee5d [ 1038.790750] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:55:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6e1899a-69c5-486d-bfb2-a2f12c06e8ac', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.799472] env[62974]: DEBUG oslo.service.loopingcall [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1038.802832] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.803312] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3aadd1eb-0c94-4370-9fd7-cc365a5fa679 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.823239] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654958, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.829826] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.829826] env[62974]: value = "task-2654959" [ 1038.829826] env[62974]: _type = "Task" [ 1038.829826] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.838845] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654959, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.073368] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1039.074245] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.074245] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.074245] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.074448] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.074929] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1039.075181] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1039.075721] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1039.075852] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies 
{{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1039.076220] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1039.076937] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1039.087170] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7f4eaac-282b-48d0-aeae-e8fd92575312 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.104974] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1039.104974] env[62974]: value = "task-2654960" [ 1039.104974] env[62974]: _type = "Task" [ 1039.104974] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.115274] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654960, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.175456] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654956, 'name': ReconfigVM_Task, 'duration_secs': 0.700104} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.176626] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/1aafddba-5da3-4c46-a537-3c178a1fec88.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.178064] env[62974]: DEBUG nova.compute.manager [req-2f0be3bc-2ef8-4bb6-82b3-b8cf6935091b req-94f32eb7-9bab-4664-997d-22e0a6d42eab service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Received event network-vif-plugged-5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1039.178346] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f0be3bc-2ef8-4bb6-82b3-b8cf6935091b req-94f32eb7-9bab-4664-997d-22e0a6d42eab service nova] Acquiring lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.178583] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f0be3bc-2ef8-4bb6-82b3-b8cf6935091b req-94f32eb7-9bab-4664-997d-22e0a6d42eab service nova] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.178805] env[62974]: DEBUG oslo_concurrency.lockutils [req-2f0be3bc-2ef8-4bb6-82b3-b8cf6935091b req-94f32eb7-9bab-4664-997d-22e0a6d42eab service nova] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.179027] env[62974]: DEBUG nova.compute.manager [req-2f0be3bc-2ef8-4bb6-82b3-b8cf6935091b req-94f32eb7-9bab-4664-997d-22e0a6d42eab service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] No waiting events found dispatching network-vif-plugged-5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1039.179248] env[62974]: WARNING nova.compute.manager [req-2f0be3bc-2ef8-4bb6-82b3-b8cf6935091b req-94f32eb7-9bab-4664-997d-22e0a6d42eab service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Received unexpected event network-vif-plugged-5d45e949-a386-4bc2-a1a4-e9232bcaeeba for instance with vm_state building and task_state block_device_mapping. [ 1039.179560] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-beea0858-512e-4f91-9f38-83ebcebf0501 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.188263] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1039.188263] env[62974]: value = "task-2654961" [ 1039.188263] env[62974]: _type = "Task" [ 1039.188263] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.198011] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654961, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.265582] env[62974]: DEBUG nova.network.neutron [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Successfully updated port: 5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.272049] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542467} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.272311] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 72b0b643-7747-4dae-9d85-c8c6a573ce07/72b0b643-7747-4dae-9d85-c8c6a573ce07.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.272566] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.272859] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85010647-0713-4eaf-bd7f-1a7fc7f87748 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.283132] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1039.283132] env[62974]: value = "task-2654962" [ 1039.283132] env[62974]: _type = "Task" [ 1039.283132] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.290912] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654958, 'name': ReconfigVM_Task, 'duration_secs': 0.395174} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.291591] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8/8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.292291] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f182007d-203d-4003-8e7b-4052a5d53ba5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.297206] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654962, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.306031] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1039.306031] env[62974]: value = "task-2654963" [ 1039.306031] env[62974]: _type = "Task" [ 1039.306031] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.306374] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d0d5146-301e-41b6-a7e4-acea43f5be1b tempest-ServersTestJSON-204793063 tempest-ServersTestJSON-204793063-project-member] Lock "7163e48f-8344-4837-bbfd-cbb5741eee5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.776s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.324536] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654963, 'name': Rename_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.340107] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654959, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.615271] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654960, 'name': ReconfigVM_Task, 'duration_secs': 0.503404} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.615589] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance '713b503e-43b5-409c-8086-e6d36850f962' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1039.701839] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654961, 'name': Rename_Task, 'duration_secs': 0.416411} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.702148] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.702407] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16a2f881-997b-441b-aa0d-a75f512160b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.710459] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1039.710459] env[62974]: value = "task-2654964" [ 1039.710459] env[62974]: _type = "Task" [ 1039.710459] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.725913] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654964, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.768305] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.768616] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.768836] env[62974]: DEBUG nova.network.neutron [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.793486] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086338} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.793759] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.794540] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4965d104-fe46-4260-8fd9-37a5c19e050d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.816983] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 72b0b643-7747-4dae-9d85-c8c6a573ce07/72b0b643-7747-4dae-9d85-c8c6a573ce07.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.820949] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b6d3f86-445d-4cf5-b56b-aa51b51006ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.841866] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654963, 'name': Rename_Task, 'duration_secs': 0.463368} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.842576] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.842840] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2abadb5d-c723-4006-a430-ff85be803c95 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.845545] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1039.845545] env[62974]: value = "task-2654965" [ 1039.845545] env[62974]: _type = "Task" [ 1039.845545] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.849040] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654959, 'name': CreateVM_Task, 'duration_secs': 0.898185} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.852085] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.852768] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.852932] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.853275] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1039.854602] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e92a14a5-761d-4a33-952d-ed9b655469fa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.857314] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1039.857314] env[62974]: value = "task-2654966" [ 1039.857314] env[62974]: _type = "Task" [ 1039.857314] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.865093] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1039.865093] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525654b0-ea23-f841-e944-6566c95c4f81" [ 1039.865093] env[62974]: _type = "Task" [ 1039.865093] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.865477] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654965, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.872121] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.880687] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525654b0-ea23-f841-e944-6566c95c4f81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.124724] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1040.124990] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1040.125465] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1040.125465] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Flavor pref 0:0:0 {{(pid=62974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1040.125465] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1040.125616] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1040.125813] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1040.125970] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1040.126600] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1040.126821] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1040.127017] env[62974]: DEBUG nova.virt.hardware [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1040.134842] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1040.134842] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e018dca0-c23a-431a-83a3-7c79221702a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.156058] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1040.156058] env[62974]: value = "task-2654967" [ 1040.156058] env[62974]: _type = "Task" [ 1040.156058] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.166323] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654967, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.222818] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654964, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.319163] env[62974]: DEBUG nova.network.neutron [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.359927] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654965, 'name': ReconfigVM_Task, 'duration_secs': 0.469529} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.363281] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 72b0b643-7747-4dae-9d85-c8c6a573ce07/72b0b643-7747-4dae-9d85-c8c6a573ce07.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.363940] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9276ca25-d342-48c2-9dcb-afee04d88aff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.374217] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1040.374217] env[62974]: value = "task-2654968" [ 1040.374217] env[62974]: _type = "Task" [ 1040.374217] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.374492] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654966, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.388017] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525654b0-ea23-f841-e944-6566c95c4f81, 'name': SearchDatastore_Task, 'duration_secs': 0.025972} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.388781] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.388934] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.389312] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.389392] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.389540] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.390127] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f78b3f6a-d8f3-4bdb-96ee-0e80197dccf6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.397948] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654968, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.408890] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.409128] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.409870] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eafb818-0db1-468b-9e09-e491e69893f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.417859] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1040.417859] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523281ab-dced-8523-bebc-2b5b1b94dfd1" [ 1040.417859] env[62974]: _type = "Task" [ 1040.417859] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.428255] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523281ab-dced-8523-bebc-2b5b1b94dfd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.568159] env[62974]: DEBUG nova.network.neutron [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [{"id": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "address": "fa:16:3e:8a:1f:9e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d45e949-a3", "ovs_interfaceid": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.600023] env[62974]: DEBUG nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1040.600023] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1040.600023] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1040.600023] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1040.600023] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1040.600023] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1040.600023] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1040.600023] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1040.600871] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1040.601470] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies 
{{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1040.601853] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1040.602687] env[62974]: DEBUG nova.virt.hardware [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1040.604646] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b801cb-7fea-43b4-9718-cc0d462e43e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.614868] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3261832-75da-41f5-af6c-a18766743713 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.665974] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654967, 'name': ReconfigVM_Task, 'duration_secs': 0.222344} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.666288] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1040.667070] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dff33a1-49db-4eaf-b984-b065d8eec09a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.693809] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 713b503e-43b5-409c-8086-e6d36850f962/713b503e-43b5-409c-8086-e6d36850f962.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.694233] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66d292cb-3ab0-4b89-9d14-cde6d79610dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.716907] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1040.716907] env[62974]: value = "task-2654969" [ 1040.716907] env[62974]: _type = "Task" [ 1040.716907] 
env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.728780] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654969, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.731817] env[62974]: DEBUG oslo_vmware.api [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654964, 'name': PowerOnVM_Task, 'duration_secs': 0.792379} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.732081] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1040.732289] env[62974]: INFO nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Took 9.24 seconds to spawn the instance on the hypervisor. [ 1040.732478] env[62974]: DEBUG nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.733314] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56903ad-e8b8-4b0d-a288-f7f793dd9f5c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.869424] env[62974]: DEBUG oslo_vmware.api [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654966, 'name': PowerOnVM_Task, 'duration_secs': 0.599892} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.869694] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1040.869966] env[62974]: INFO nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Took 12.68 seconds to spawn the instance on the hypervisor. 
[ 1040.870260] env[62974]: DEBUG nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.870981] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15a3cf9-53ed-4e52-ad57-b18e8c5b1820 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.888937] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654968, 'name': Rename_Task, 'duration_secs': 0.207474} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.889491] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.889683] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ead81227-4681-4b91-a70a-80f82a772e35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.897503] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1040.897503] env[62974]: value = "task-2654970" [ 1040.897503] env[62974]: _type = "Task" [ 1040.897503] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.911235] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.932072] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523281ab-dced-8523-bebc-2b5b1b94dfd1, 'name': SearchDatastore_Task, 'duration_secs': 0.013369} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.932871] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdcd1781-86e1-45d9-9783-b78519e85c12 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.938817] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1040.938817] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224f0e5-67a2-d7a5-a857-f5cbe0adff22" [ 1040.938817] env[62974]: _type = "Task" [ 1040.938817] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.948525] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224f0e5-67a2-d7a5-a857-f5cbe0adff22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.071194] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.071577] env[62974]: DEBUG nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Instance network_info: |[{"id": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "address": "fa:16:3e:8a:1f:9e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d45e949-a3", "ovs_interfaceid": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1041.072368] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:1f:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d45e949-a386-4bc2-a1a4-e9232bcaeeba', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1041.079722] env[62974]: DEBUG oslo.service.loopingcall [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.080039] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1041.080295] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94f428a9-eb85-486f-bf8d-eb79db916ffc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.101286] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1041.101286] env[62974]: value = "task-2654971" [ 1041.101286] env[62974]: _type = "Task" [ 1041.101286] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.109464] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654971, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.209788] env[62974]: DEBUG nova.compute.manager [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Received event network-changed-5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1041.210255] env[62974]: DEBUG nova.compute.manager [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Refreshing instance network info cache due to event network-changed-5d45e949-a386-4bc2-a1a4-e9232bcaeeba. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1041.211045] env[62974]: DEBUG oslo_concurrency.lockutils [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] Acquiring lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.211045] env[62974]: DEBUG oslo_concurrency.lockutils [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] Acquired lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.211045] env[62974]: DEBUG nova.network.neutron [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Refreshing network info cache for port 5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.232782] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654969, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.251505] env[62974]: INFO nova.compute.manager [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Took 16.21 seconds to build instance. [ 1041.396118] env[62974]: INFO nova.compute.manager [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Took 17.60 seconds to build instance. [ 1041.410822] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654970, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.455084] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5224f0e5-67a2-d7a5-a857-f5cbe0adff22, 'name': SearchDatastore_Task, 'duration_secs': 0.02205} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.456896] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.457321] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.461089] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-439177c7-5352-4127-835f-059695d39d1e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.471655] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1041.471655] env[62974]: value = "task-2654972" [ 1041.471655] env[62974]: _type = "Task" [ 1041.471655] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.486111] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654972, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.614460] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654971, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.730490] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654969, 'name': ReconfigVM_Task, 'duration_secs': 0.942905} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.730805] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 713b503e-43b5-409c-8086-e6d36850f962/713b503e-43b5-409c-8086-e6d36850f962.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.731212] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance '713b503e-43b5-409c-8086-e6d36850f962' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1041.755936] env[62974]: DEBUG oslo_concurrency.lockutils [None req-73a9e6ed-ae3f-48fa-9c80-8ea49361966a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.725s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.899353] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5e00eed4-65b8-4611-ac9e-a2f382e89dc5 tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.109s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.913102] env[62974]: DEBUG oslo_vmware.api [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2654970, 'name': PowerOnVM_Task, 'duration_secs': 0.783147} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.913102] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.913102] env[62974]: INFO nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Took 8.12 seconds to spawn the instance on the hypervisor. 
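The CreateVM_Task / PowerOnVM_Task entries above are the output of oslo.vmware's task polling loop. A minimal sketch of how such a task is started and awaited through VMwareAPISession follows; the host, credentials and vm_ref are placeholders, not values taken from this log.

from oslo_vmware import api

# Illustrative only: placeholder vCenter endpoint and credentials.
session = api.VMwareAPISession(
    'vcenter.example.org',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)   # seconds between the "progress is N%" polls

def power_on(vm_ref):
    # invoke_api() issues the SOAP call and returns a Task managed object
    # reference; wait_for_task() polls it until it reports success or
    # raises on error, producing poll lines like those in the log above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)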
[ 1041.913274] env[62974]: DEBUG nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.914480] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e612c45-c11a-45b0-a8ab-609f9fe7fefe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.963557] env[62974]: INFO nova.compute.manager [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Rescuing [ 1041.963829] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.964091] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.964226] env[62974]: DEBUG nova.network.neutron [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1041.990706] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654972, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.015859] env[62974]: DEBUG nova.network.neutron [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updated VIF entry in instance network info cache for port 5d45e949-a386-4bc2-a1a4-e9232bcaeeba. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.016366] env[62974]: DEBUG nova.network.neutron [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [{"id": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "address": "fa:16:3e:8a:1f:9e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d45e949-a3", "ovs_interfaceid": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.112500] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2654971, 'name': CreateVM_Task, 'duration_secs': 0.520521} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.112637] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1042.113354] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'guest_format': None, 'device_type': None, 'boot_index': 0, 'attachment_id': '8fa2b82c-03cf-4d6a-8656-c8399f86899f', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535476', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'name': 'volume-cf84a2af-6e27-461e-9af2-0471881dd540', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'serial': 'cf84a2af-6e27-461e-9af2-0471881dd540'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62974) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1042.113559] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 
c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Root volume attach. Driver type: vmdk {{(pid=62974) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1042.114398] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f892b5-c031-4b94-909a-946db1990cc7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.122570] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e5821d-5400-474b-a521-c388a3588373 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.125407] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.125630] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.125824] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.126009] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.126189] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.128359] env[62974]: INFO nova.compute.manager [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Terminating instance [ 1042.133066] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa2a048-6610-40be-9776-156af0fb5788 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.139311] env[62974]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6a676f02-7e73-4065-8e10-6a6c2dd6b1bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.147793] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1042.147793] env[62974]: value = "task-2654973" [ 1042.147793] env[62974]: _type = "Task" [ 1042.147793] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.156890] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.239424] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febb56f1-4028-4a02-a6ad-eb6cac0f248d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.260702] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598b6c65-8c52-4f5c-a7e3-aafcb608926f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.283487] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance '713b503e-43b5-409c-8086-e6d36850f962' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1042.443239] env[62974]: INFO nova.compute.manager [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Took 14.71 seconds to build instance. [ 1042.483355] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654972, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57354} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.483355] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.483547] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.483794] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4073679-663f-45dd-b849-7b285f602c61 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.493315] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1042.493315] env[62974]: value = "task-2654974" [ 1042.493315] env[62974]: _type = "Task" [ 1042.493315] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.505183] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654974, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.518894] env[62974]: DEBUG oslo_concurrency.lockutils [req-1555e733-804d-4cf2-9fa0-1505e25ae49a req-118d047e-3573-4d6d-9786-64ecd1134a67 service nova] Releasing lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.636724] env[62974]: DEBUG nova.compute.manager [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1042.637516] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1042.639039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9230b398-ae61-4fd5-aa36-6238c3dc2c77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.657887] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.658634] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-701410f4-2d24-47b0-88fd-9959c7422714 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.671024] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 35%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.671024] env[62974]: DEBUG oslo_vmware.api [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1042.671024] env[62974]: value = "task-2654975" [ 1042.671024] env[62974]: _type = "Task" [ 1042.671024] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.684506] env[62974]: DEBUG oslo_vmware.api [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654975, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.700220] env[62974]: DEBUG nova.network.neutron [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Updating instance_info_cache with network_info: [{"id": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "address": "fa:16:3e:07:aa:39", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ed519a-7e", "ovs_interfaceid": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.829031] env[62974]: DEBUG nova.network.neutron [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Port 42aa0aae-99ad-43cd-96cc-af93f45297cf binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1042.945353] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9fe31f5-d8ed-48cc-8a4e-178e2b33df20 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.218s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.007065] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654974, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160885} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.007406] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.008395] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869c54c6-1bdc-4388-b581-8582d533c491 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.036178] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.036587] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f548d79-7f0c-4959-b1c5-7cf935641105 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.064012] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1043.064012] env[62974]: value = "task-2654976" [ 1043.064012] env[62974]: _type = "Task" [ 1043.064012] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.075922] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654976, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.164950] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 49%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.183861] env[62974]: DEBUG oslo_vmware.api [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654975, 'name': PowerOffVM_Task, 'duration_secs': 0.399467} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.184234] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.184405] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.184679] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c68b5573-e70f-4d02-b392-e04fd8549d09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.203528] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.332416] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1043.332416] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1043.332416] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Deleting the datastore file [datastore1] 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.337453] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-968db62e-b369-4a88-ac00-9a31fa109f4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.350784] env[62974]: DEBUG oslo_vmware.api [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for the task: (returnval){ [ 1043.350784] env[62974]: value = "task-2654978" [ 1043.350784] env[62974]: _type = "Task" [ 1043.350784] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.363573] env[62974]: DEBUG oslo_vmware.api [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654978, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.583909] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654976, 'name': ReconfigVM_Task, 'duration_secs': 0.392493} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.584409] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Reconfigured VM instance instance-0000005e to attach disk [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9/dca952df-dac9-4502-948b-24ac6fb939f9.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.585144] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1014e525-caa6-4881-87f9-532edd1188ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.596660] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1043.596660] env[62974]: value = "task-2654979" [ 1043.596660] env[62974]: _type = "Task" [ 1043.596660] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.610869] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654979, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.660756] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 62%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.781189] env[62974]: DEBUG nova.compute.manager [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-changed-4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1043.781302] env[62974]: DEBUG nova.compute.manager [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing instance network info cache due to event network-changed-4d417e22-6d84-4f85-9504-ae36562bc03e. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1043.781594] env[62974]: DEBUG oslo_concurrency.lockutils [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.781690] env[62974]: DEBUG oslo_concurrency.lockutils [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.781821] env[62974]: DEBUG nova.network.neutron [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing network info cache for port 4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.851752] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "713b503e-43b5-409c-8086-e6d36850f962-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.853188] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.853188] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.874810] env[62974]: DEBUG oslo_vmware.api [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654978, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.112275] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654979, 'name': Rename_Task, 'duration_secs': 0.182644} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.112594] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.112860] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e5a6162-c3bc-400a-8323-2a1b20b7b575 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.123774] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1044.123774] env[62974]: value = "task-2654980" [ 1044.123774] env[62974]: _type = "Task" [ 1044.123774] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.136236] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654980, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.163485] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 76%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.375468] env[62974]: DEBUG oslo_vmware.api [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Task: {'id': task-2654978, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.533189} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.376552] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.376729] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.376914] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.377379] env[62974]: INFO nova.compute.manager [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1044.377379] env[62974]: DEBUG oslo.service.loopingcall [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.380059] env[62974]: DEBUG nova.compute.manager [-] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1044.380059] env[62974]: DEBUG nova.network.neutron [-] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1044.637200] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654980, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.643482] env[62974]: DEBUG nova.network.neutron [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updated VIF entry in instance network info cache for port 4d417e22-6d84-4f85-9504-ae36562bc03e. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.643482] env[62974]: DEBUG nova.network.neutron [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.665925] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.751401] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.751401] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a860f07d-3607-4292-bd21-f44a2c938805 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.759905] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1044.759905] env[62974]: value = "task-2654981" [ 1044.759905] env[62974]: _type = "Task" [ 1044.759905] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.771880] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654981, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.917752] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.917948] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.918154] env[62974]: DEBUG nova.network.neutron [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.136013] env[62974]: DEBUG oslo_vmware.api [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2654980, 'name': PowerOnVM_Task, 'duration_secs': 0.598649} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.136386] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.136545] env[62974]: DEBUG nova.compute.manager [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.137360] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9bcc2b-8a85-4fa9-bea6-0de1dad7f260 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.146681] env[62974]: DEBUG oslo_concurrency.lockutils [req-bf1a9040-2edc-4da7-956b-7f53974e8a0e req-deac33bb-5c65-4c53-a3da-1ade6e81f5a1 service nova] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.160933] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 97%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.270413] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654981, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.509270] env[62974]: DEBUG nova.network.neutron [-] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.638540] env[62974]: DEBUG nova.network.neutron [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance_info_cache with network_info: [{"id": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "address": "fa:16:3e:5c:65:4f", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42aa0aae-99", "ovs_interfaceid": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.657046] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.657288] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.657466] env[62974]: DEBUG nova.objects.instance [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1045.666173] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.769933] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654981, 'name': PowerOffVM_Task, 'duration_secs': 0.549979} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.770304] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1045.771341] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e889ee-9a8f-417a-b20f-ad175059db66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.791849] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272b6fd9-b658-4f0d-a61e-9ad2b0e32270 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.807756] env[62974]: DEBUG nova.compute.manager [req-037a225a-b605-4cd8-a9dd-ac7f557757c9 req-5c7bef39-d2df-4c88-8f15-eba8d606cfbd service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received event network-vif-deleted-ec393463-0f9a-4e87-ac77-0f8294550bfd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1045.808029] env[62974]: DEBUG nova.compute.manager [req-037a225a-b605-4cd8-a9dd-ac7f557757c9 req-5c7bef39-d2df-4c88-8f15-eba8d606cfbd service nova] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Received event network-vif-deleted-04938e84-a6a7-41b3-8f7d-f5f881420f7a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1045.823815] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.824500] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7841a3fa-ea1d-4090-bf37-7b264fbadf89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.832794] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1045.832794] env[62974]: value = "task-2654982" [ 1045.832794] env[62974]: _type = "Task" [ 1045.832794] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.845371] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1045.845572] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.845809] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.845956] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.846149] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1045.846387] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-855bb090-e88d-4b84-b71a-3e3d2d927a49 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.859729] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1045.859927] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1045.860687] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d620408f-3ecf-4187-873d-1066e703a054 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.866787] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1045.866787] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52405e1a-f850-d49a-52f6-2cffb6f5ad28" [ 1045.866787] env[62974]: _type = "Task" [ 1045.866787] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.875133] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52405e1a-f850-d49a-52f6-2cffb6f5ad28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.012592] env[62974]: INFO nova.compute.manager [-] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Took 1.63 seconds to deallocate network for instance. [ 1046.141127] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.165254] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task} progress is 98%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.379064] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52405e1a-f850-d49a-52f6-2cffb6f5ad28, 'name': SearchDatastore_Task, 'duration_secs': 0.037929} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.379824] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-888d1ab2-a372-4008-adf7-b7113a01c66b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.386658] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1046.386658] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5298f155-064b-441f-aa38-1b4abf458079" [ 1046.386658] env[62974]: _type = "Task" [ 1046.386658] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.396275] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5298f155-064b-441f-aa38-1b4abf458079, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.519419] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.662891] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654973, 'name': RelocateVM_Task, 'duration_secs': 4.207204} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.663262] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Volume attach. Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1046.663484] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535476', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'name': 'volume-cf84a2af-6e27-461e-9af2-0471881dd540', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'serial': 'cf84a2af-6e27-461e-9af2-0471881dd540'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1046.664384] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa3c932-6b9d-456a-ac5f-20274713d059 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.667886] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3979582d-4e12-47f3-ac1a-59e0d54ed760 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.669765] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.151s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.670042] env[62974]: DEBUG nova.objects.instance [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lazy-loading 'resources' on Instance uuid 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.671781] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cd2d69-942e-4c27-acf7-4b5d0eff08d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.690020] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f51cc7-563d-4853-9cc9-65cf9fbcd6bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.707348] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd32289c-cc0e-4175-b822-2018bad7d259 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.728034] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-cf84a2af-6e27-461e-9af2-0471881dd540/volume-cf84a2af-6e27-461e-9af2-0471881dd540.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.729105] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd3f57f6-7dee-4612-8f76-4006ecccd0b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.746496] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance '713b503e-43b5-409c-8086-e6d36850f962' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1046.756764] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1046.756764] env[62974]: value = "task-2654983" [ 1046.756764] env[62974]: _type = "Task" [ 1046.756764] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.765815] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654983, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.899127] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5298f155-064b-441f-aa38-1b4abf458079, 'name': SearchDatastore_Task, 'duration_secs': 0.02974} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.899127] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.899127] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. {{(pid=62974) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1046.899510] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebe26339-ceb3-4346-a8ac-9ef9a855d0b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.910197] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1046.910197] env[62974]: value = "task-2654984" [ 1046.910197] env[62974]: _type = "Task" [ 1046.910197] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.920711] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654984, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.255479] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.256079] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1c0643c-5611-4fb9-b214-96a17563fc48 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.271078] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1047.271078] env[62974]: value = "task-2654985" [ 1047.271078] env[62974]: _type = "Task" [ 1047.271078] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.277631] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654983, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.288811] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654985, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.422647] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654984, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.447464] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d96342d-bfbf-4259-9ee9-f5a6aeb839b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.458960] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f43a050-33ce-4847-aaa6-4fa8216b53b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.493050] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176bc6ae-f6e1-4d6b-859a-6b1cb8eaef94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.502796] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3eb238-6a14-4dbd-acfc-49701d9ac166 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.518459] env[62974]: DEBUG nova.compute.provider_tree [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.770249] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654983, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.788462] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654985, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.901764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.902111] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.902329] env[62974]: INFO nova.compute.manager [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Shelving [ 1047.922894] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.591943} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.923183] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk. [ 1047.924018] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52954f1-fd02-4b12-be30-61e953917747 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.960890] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.961241] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85dc6f64-e23c-4c8b-a06d-edfd6402a120 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.983356] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1047.983356] env[62974]: value = "task-2654986" [ 1047.983356] env[62974]: _type = "Task" [ 1047.983356] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.994885] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654986, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.022824] env[62974]: DEBUG nova.scheduler.client.report [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.275013] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654983, 'name': ReconfigVM_Task, 'duration_secs': 1.365096} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.275320] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-cf84a2af-6e27-461e-9af2-0471881dd540/volume-cf84a2af-6e27-461e-9af2-0471881dd540.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.280140] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6743cded-694b-493e-9c93-626ce9ac8755 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.299703] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654985, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.301171] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1048.301171] env[62974]: value = "task-2654987" [ 1048.301171] env[62974]: _type = "Task" [ 1048.301171] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.494743] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654986, 'name': ReconfigVM_Task, 'duration_secs': 0.312886} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.495073] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88/807f8582-499f-47ee-9d5b-755c9f39bc39-rescue.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.495984] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05470f8e-0fc8-4f7b-b659-56ca0839de1f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.521542] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c7159c7-b59f-4022-87f5-6db34af17472 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.532611] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.543281] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1048.543281] env[62974]: value = "task-2654988" [ 1048.543281] env[62974]: _type = "Task" [ 1048.543281] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.555828] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654988, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.558164] env[62974]: INFO nova.scheduler.client.report [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Deleted allocations for instance 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8 [ 1048.791325] env[62974]: DEBUG oslo_vmware.api [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2654985, 'name': PowerOnVM_Task, 'duration_secs': 1.399047} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.791615] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.791802] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-e800dad9-62fc-495f-ab45-ecfdf630ae91 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance '713b503e-43b5-409c-8086-e6d36850f962' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1048.811877] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654987, 'name': ReconfigVM_Task, 'duration_secs': 0.305517} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.811877] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535476', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'name': 'volume-cf84a2af-6e27-461e-9af2-0471881dd540', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'serial': 'cf84a2af-6e27-461e-9af2-0471881dd540'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1048.811877] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13f93f30-e54a-441f-a8a0-405fe9cbe443 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.820058] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1048.820058] env[62974]: value = "task-2654989" [ 1048.820058] env[62974]: _type = "Task" [ 1048.820058] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.835430] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654989, 'name': Rename_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.911776] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.912109] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-494d2e6c-a9a1-492e-99a2-cdc3fcb1d2dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.919771] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1048.919771] env[62974]: value = "task-2654990" [ 1048.919771] env[62974]: _type = "Task" [ 1048.919771] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.930050] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.055858] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654988, 'name': ReconfigVM_Task, 'duration_secs': 0.203375} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.056102] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.056386] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dde6e9c7-9fc3-410c-8cbe-b72d44c12401 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.064114] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1049.064114] env[62974]: value = "task-2654991" [ 1049.064114] env[62974]: _type = "Task" [ 1049.064114] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.069810] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b6d76a9-94e2-4316-87cd-d2ff9ef9a80b tempest-ServersTestMultiNic-664197129 tempest-ServersTestMultiNic-664197129-project-member] Lock "8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.944s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.076910] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654991, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.330779] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654989, 'name': Rename_Task, 'duration_secs': 0.286588} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.331022] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.331297] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9adc7183-0258-46df-834d-5ee873200068 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.337706] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1049.337706] env[62974]: value = "task-2654992" [ 1049.337706] env[62974]: _type = "Task" [ 1049.337706] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.353527] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.429849] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654990, 'name': PowerOffVM_Task, 'duration_secs': 0.34499} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.430176] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.430998] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820cabbf-c54e-4369-888d-1919cee019cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.451457] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626f4903-60b2-4fa3-af62-5db6a833f750 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.574537] env[62974]: DEBUG oslo_vmware.api [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654991, 'name': PowerOnVM_Task, 'duration_secs': 0.419694} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.574829] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.577428] env[62974]: DEBUG nova.compute.manager [None req-4f2b899d-f05f-4157-b953-739db67c5764 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.578317] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8001f54e-fd32-424c-8663-edc3f531c9eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.849420] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654992, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.966018] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1049.966018] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3635fb13-2825-4853-bf9d-90e10718335b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.974516] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1049.974516] env[62974]: value = "task-2654993" [ 1049.974516] env[62974]: _type = "Task" [ 1049.974516] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.984396] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654993, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.350040] env[62974]: DEBUG oslo_vmware.api [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2654992, 'name': PowerOnVM_Task, 'duration_secs': 0.804159} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.350040] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.350273] env[62974]: INFO nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Took 9.75 seconds to spawn the instance on the hypervisor. 
[ 1050.350432] env[62974]: DEBUG nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.351225] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdacb35-1d80-4652-9bdd-192b90a5cf73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.419042] env[62974]: INFO nova.compute.manager [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Unrescuing [ 1050.419217] env[62974]: DEBUG oslo_concurrency.lockutils [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.419375] env[62974]: DEBUG oslo_concurrency.lockutils [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquired lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.419539] env[62974]: DEBUG nova.network.neutron [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.485870] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654993, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.686197] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "713b503e-43b5-409c-8086-e6d36850f962" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.686197] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.686197] env[62974]: DEBUG nova.compute.manager [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Going to confirm migration 6 {{(pid=62974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1050.873296] env[62974]: INFO nova.compute.manager [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Took 16.36 seconds to build instance. [ 1050.986826] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654993, 'name': CreateSnapshot_Task, 'duration_secs': 0.953389} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.987126] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1050.987900] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d73927c-2af6-40bc-89f6-3cd1fa5348af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.167423] env[62974]: DEBUG nova.network.neutron [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Updating instance_info_cache with network_info: [{"id": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "address": "fa:16:3e:07:aa:39", "network": {"id": "48e6d8c5-db98-4d1e-afac-de705f709283", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2027024350-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "21909beb1faa4a2c994925764408480f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ed519a-7e", "ovs_interfaceid": "f0ed519a-7e15-4b09-9180-2b0c2abe1052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.246172] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.246172] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquired lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.246172] env[62974]: DEBUG nova.network.neutron [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.246172] env[62974]: DEBUG nova.objects.instance [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 
tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'info_cache' on Instance uuid 713b503e-43b5-409c-8086-e6d36850f962 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.375680] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4132bea1-f24e-4405-aba9-5b08939436ce tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.872s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.511034] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1051.511034] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8c38244e-bb04-4af2-b898-83c1a7dfe67d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.520039] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1051.520039] env[62974]: value = "task-2654994" [ 1051.520039] env[62974]: _type = "Task" [ 1051.520039] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.531339] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654994, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.668970] env[62974]: DEBUG oslo_concurrency.lockutils [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Releasing lock "refresh_cache-1aafddba-5da3-4c46-a537-3c178a1fec88" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.669703] env[62974]: DEBUG nova.objects.instance [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lazy-loading 'flavor' on Instance uuid 1aafddba-5da3-4c46-a537-3c178a1fec88 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.033256] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654994, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.159535] env[62974]: DEBUG nova.compute.manager [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Received event network-changed-7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1052.159887] env[62974]: DEBUG nova.compute.manager [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Refreshing instance network info cache due to event network-changed-7e4b21ba-e0f2-4104-8f46-57871fd6ed16. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1052.160341] env[62974]: DEBUG oslo_concurrency.lockutils [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] Acquiring lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.160568] env[62974]: DEBUG oslo_concurrency.lockutils [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] Acquired lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.160839] env[62974]: DEBUG nova.network.neutron [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Refreshing network info cache for port 7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1052.178641] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c3cca0-c2ce-4414-a841-3caf7e36b8f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.201320] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.201704] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e1c3576-30c0-42ff-9fb9-4066a75efb26 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.209965] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1052.209965] env[62974]: value = "task-2654995" [ 1052.209965] env[62974]: _type = "Task" [ 1052.209965] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.219362] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654995, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.469180] env[62974]: DEBUG nova.network.neutron [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance_info_cache with network_info: [{"id": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "address": "fa:16:3e:5c:65:4f", "network": {"id": "405f3292-8176-4509-a83b-0054497526bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1021148369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14dd4a9a77ad40458d40bb82ac4b90a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42aa0aae-99", "ovs_interfaceid": "42aa0aae-99ad-43cd-96cc-af93f45297cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.532176] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654994, 'name': CloneVM_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.723279] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654995, 'name': PowerOffVM_Task, 'duration_secs': 0.269418} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.723583] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.729304] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1052.731068] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5044d85d-d053-4109-977f-6d2e6dfd4ea2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.758955] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1052.758955] env[62974]: value = "task-2654996" [ 1052.758955] env[62974]: _type = "Task" [ 1052.758955] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.777607] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654996, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.898085] env[62974]: DEBUG nova.network.neutron [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updated VIF entry in instance network info cache for port 7e4b21ba-e0f2-4104-8f46-57871fd6ed16. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.898553] env[62974]: DEBUG nova.network.neutron [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [{"id": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "address": "fa:16:3e:34:87:aa", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b21ba-e0", "ovs_interfaceid": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.973209] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Releasing lock "refresh_cache-713b503e-43b5-409c-8086-e6d36850f962" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.973601] env[62974]: DEBUG nova.objects.instance [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lazy-loading 'migration_context' on Instance uuid 713b503e-43b5-409c-8086-e6d36850f962 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.032916] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654994, 'name': CloneVM_Task} progress is 95%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.270582] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654996, 'name': ReconfigVM_Task, 'duration_secs': 0.30336} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.270864] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1053.271075] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.271343] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d973f91-d10f-4ff0-8cee-27a4289cbdb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.280293] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1053.280293] env[62974]: value = "task-2654997" [ 1053.280293] env[62974]: _type = "Task" [ 1053.280293] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.289680] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654997, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.401877] env[62974]: DEBUG oslo_concurrency.lockutils [req-feb01bea-439d-4314-8fa5-0ac01d59ecef req-aa3de3b8-fe88-44eb-a573-b8e45f8156f6 service nova] Releasing lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.477073] env[62974]: DEBUG nova.objects.base [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Object Instance<713b503e-43b5-409c-8086-e6d36850f962> lazy-loaded attributes: info_cache,migration_context {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1053.478016] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac7efcb-2846-474b-bebe-3c4f893c31b0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.499211] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc1b4204-1b50-4421-b177-4a933eaff624 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.505600] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1053.505600] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527a530c-fefe-4079-3548-132e40217c96" [ 1053.505600] env[62974]: _type = "Task" [ 1053.505600] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.514133] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527a530c-fefe-4079-3548-132e40217c96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.531336] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2654994, 'name': CloneVM_Task, 'duration_secs': 1.718299} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.531336] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Created linked-clone VM from snapshot [ 1053.532069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f81c7f-ef5d-42be-8c6b-13ca1b0febfe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.539938] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Uploading image ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1053.566121] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1053.566121] env[62974]: value = "vm-535485" [ 1053.566121] env[62974]: _type = "VirtualMachine" [ 1053.566121] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1053.566435] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b1aefc1b-430d-4968-879b-e9c2f958cb51 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.575718] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lease: (returnval){ [ 1053.575718] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d470a-9438-3af3-0b92-aff21c292491" [ 1053.575718] env[62974]: _type = "HttpNfcLease" [ 1053.575718] env[62974]: } obtained for exporting VM: (result){ [ 1053.575718] env[62974]: value = "vm-535485" [ 1053.575718] env[62974]: _type = "VirtualMachine" [ 1053.575718] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1053.576023] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the lease: (returnval){ [ 1053.576023] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d470a-9438-3af3-0b92-aff21c292491" [ 1053.576023] env[62974]: _type = "HttpNfcLease" [ 1053.576023] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1053.582769] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1053.582769] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d470a-9438-3af3-0b92-aff21c292491" [ 1053.582769] env[62974]: _type = "HttpNfcLease" [ 1053.582769] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1053.736564] env[62974]: DEBUG nova.compute.manager [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1053.792153] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654997, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.018295] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527a530c-fefe-4079-3548-132e40217c96, 'name': SearchDatastore_Task, 'duration_secs': 0.010168} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.018518] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.018739] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.084584] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1054.084584] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d470a-9438-3af3-0b92-aff21c292491" [ 1054.084584] env[62974]: _type = "HttpNfcLease" [ 1054.084584] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1054.084881] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1054.084881] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527d470a-9438-3af3-0b92-aff21c292491" [ 1054.084881] env[62974]: _type = "HttpNfcLease" [ 1054.084881] env[62974]: }. 
{{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1054.085639] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d1ec96-ebcb-4393-8f83-26ea74eb343c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.093341] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207c091-2980-86cf-0601-83a838469f6f/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1054.093510] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207c091-2980-86cf-0601-83a838469f6f/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1054.189269] env[62974]: DEBUG nova.compute.manager [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Received event network-changed-5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1054.189506] env[62974]: DEBUG nova.compute.manager [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Refreshing instance network info cache due to event network-changed-5d45e949-a386-4bc2-a1a4-e9232bcaeeba. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1054.189978] env[62974]: DEBUG oslo_concurrency.lockutils [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] Acquiring lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.190191] env[62974]: DEBUG oslo_concurrency.lockutils [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] Acquired lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.190406] env[62974]: DEBUG nova.network.neutron [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Refreshing network info cache for port 5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1054.257874] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.291685] env[62974]: DEBUG oslo_vmware.api [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654997, 'name': PowerOnVM_Task, 'duration_secs': 0.54808} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.291957] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.292186] env[62974]: DEBUG nova.compute.manager [None req-28f29ed7-66b1-4d6d-a2cc-08fd482256e0 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.292914] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fdd135-daad-4e49-892b-d8bccbc60c30 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.315111] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-90092679-baa3-43c2-b65e-cacda3cd7403 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.739442] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77325ef6-e7eb-4f70-b4bd-70c62707274c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.749267] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d812e64-151c-4d71-bf0f-3a246538a78a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.786687] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19d7fd2-211f-4337-a276-31d6d5c166b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.795685] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cf185f-f68e-41d9-982b-4e5f9c762372 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.814254] env[62974]: DEBUG nova.compute.provider_tree [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.944254] env[62974]: DEBUG nova.network.neutron [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updated VIF entry in instance network info cache for port 5d45e949-a386-4bc2-a1a4-e9232bcaeeba. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1054.944633] env[62974]: DEBUG nova.network.neutron [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [{"id": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "address": "fa:16:3e:8a:1f:9e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d45e949-a3", "ovs_interfaceid": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.168438] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "1aafddba-5da3-4c46-a537-3c178a1fec88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.168712] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.168958] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "1aafddba-5da3-4c46-a537-3c178a1fec88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.169164] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.169337] env[62974]: DEBUG 
oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.171441] env[62974]: INFO nova.compute.manager [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Terminating instance [ 1055.317163] env[62974]: DEBUG nova.scheduler.client.report [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1055.447326] env[62974]: DEBUG oslo_concurrency.lockutils [req-a5fbea90-41fc-4986-8fdc-ba84ca1ee49d req-3db29d37-32e6-44d1-9b21-395a8fe2a305 service nova] Releasing lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.675442] env[62974]: DEBUG nova.compute.manager [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.675608] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.676571] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c50481-7788-493e-8367-8b0fdcaec9fe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.685324] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.685769] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2667d1d7-5b8e-48f8-bbc8-29f99d244074 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.693618] env[62974]: DEBUG oslo_vmware.api [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1055.693618] env[62974]: value = "task-2654999" [ 1055.693618] env[62974]: _type = "Task" [ 1055.693618] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.702871] env[62974]: DEBUG oslo_vmware.api [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.205105] env[62974]: DEBUG oslo_vmware.api [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2654999, 'name': PowerOffVM_Task, 'duration_secs': 0.203076} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.205105] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.205329] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.205496] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5519a3f9-4ec5-4aba-b468-559db098e646 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.281193] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.281396] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.281584] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Deleting the datastore file [datastore1] 1aafddba-5da3-4c46-a537-3c178a1fec88 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.281864] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9980f25-4365-4716-8432-93ff7fa64738 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.288960] env[62974]: DEBUG oslo_vmware.api [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1056.288960] env[62974]: value = "task-2655001" [ 1056.288960] env[62974]: _type = "Task" [ 1056.288960] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.297314] env[62974]: DEBUG oslo_vmware.api [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2655001, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.329104] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.310s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.332195] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.075s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.799356] env[62974]: DEBUG oslo_vmware.api [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2655001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188125} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.799683] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.800025] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.800257] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.800442] env[62974]: INFO nova.compute.manager [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1056.800713] env[62974]: DEBUG oslo.service.loopingcall [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.800911] env[62974]: DEBUG nova.compute.manager [-] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.801015] env[62974]: DEBUG nova.network.neutron [-] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.839153] env[62974]: INFO nova.compute.claims [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.897034] env[62974]: INFO nova.scheduler.client.report [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted allocation for migration 9ff5649a-0b47-462b-99bb-9611fad0581f [ 1057.140623] env[62974]: DEBUG nova.compute.manager [req-f064ba3d-6f59-452a-8790-71230b480f53 req-a82d8dae-a7f3-4ee8-9f24-ab2737b58129 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Received event network-vif-deleted-f0ed519a-7e15-4b09-9180-2b0c2abe1052 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1057.140877] env[62974]: INFO nova.compute.manager [req-f064ba3d-6f59-452a-8790-71230b480f53 req-a82d8dae-a7f3-4ee8-9f24-ab2737b58129 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Neutron deleted interface f0ed519a-7e15-4b09-9180-2b0c2abe1052; detaching it from the instance and deleting it from the info cache [ 1057.141079] env[62974]: DEBUG nova.network.neutron [req-f064ba3d-6f59-452a-8790-71230b480f53 req-a82d8dae-a7f3-4ee8-9f24-ab2737b58129 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.348085] env[62974]: INFO nova.compute.resource_tracker [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating resource usage from migration 9e89606c-d22f-4fbc-9789-776bf08c4296 [ 1057.402904] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.718s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.525543] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a336a1-9d7b-4f8b-ac81-54a0060cea7f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.533926] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e83c0cc-64a8-4743-ac1c-0e5ecc2c9f6a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.564449] env[62974]: DEBUG 
nova.network.neutron [-] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.566784] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba31684-9fd3-489c-b561-984739e33b61 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.575494] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c58b6da-4697-4ccb-84fe-5406ae48f3f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.591086] env[62974]: DEBUG nova.compute.provider_tree [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.643796] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a227f11d-6332-481a-9ccd-c61ebcdf2b4b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.655170] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d43d6e0-4483-4da4-8bbd-b9f0db99bbf8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.690949] env[62974]: DEBUG nova.compute.manager [req-f064ba3d-6f59-452a-8790-71230b480f53 req-a82d8dae-a7f3-4ee8-9f24-ab2737b58129 service nova] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Detach interface failed, port_id=f0ed519a-7e15-4b09-9180-2b0c2abe1052, reason: Instance 1aafddba-5da3-4c46-a537-3c178a1fec88 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1058.068077] env[62974]: INFO nova.compute.manager [-] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Took 1.27 seconds to deallocate network for instance. 
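The resource-tracker and report-client records above repeatedly log the inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 (48 VCPU at a 4.0 allocation ratio, 196590 MB of memory with 512 MB reserved, 400 GB of disk) while the resize_claim and drop_move_claim_at_source are processed under the "compute_resources" lock. As a rough illustration of what that inventory means for scheduling, the minimal Python sketch below applies the usual Placement capacity rule, (total - reserved) * allocation_ratio, to the dict exactly as it appears in the log; the schedulable() helper is hypothetical and not part of Nova or Placement.

    # Hypothetical helper: turn the inventory dict logged by
    # nova.scheduler.client.report into schedulable capacity per resource class.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable(inv):
        # Placement accepts allocations up to (total - reserved) * allocation_ratio
        # for each resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

Under this reading, the host shown here can hand out roughly 192 vCPUs, about 196078 MB of RAM and 400 GB of disk before new claims would start to be rejected, which is consistent with the claims in this trace completing after only brief waits on the compute_resources lock.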
[ 1058.093912] env[62974]: DEBUG nova.scheduler.client.report [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.574299] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.600073] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.268s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.600073] env[62974]: INFO nova.compute.manager [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Migrating [ 1058.606292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.032s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.606528] env[62974]: DEBUG nova.objects.instance [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lazy-loading 'resources' on Instance uuid 1aafddba-5da3-4c46-a537-3c178a1fec88 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.709038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "713b503e-43b5-409c-8086-e6d36850f962" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.709038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.709250] 
env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "713b503e-43b5-409c-8086-e6d36850f962-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.709512] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.709727] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.712225] env[62974]: INFO nova.compute.manager [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Terminating instance [ 1059.114465] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.114655] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.114832] env[62974]: DEBUG nova.network.neutron [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.220636] env[62974]: DEBUG nova.compute.manager [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1059.220876] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1059.222071] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77ef3a3-6fbd-4c96-9fb2-68ee586c7523 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.230767] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1059.230996] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a3e957f-ef79-423c-b581-b2fe4da1d1de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.240821] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1059.240821] env[62974]: value = "task-2655002" [ 1059.240821] env[62974]: _type = "Task" [ 1059.240821] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.251949] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2655002, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.307076] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ecd17a-1075-4cb2-bb4e-790077090ab8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.315417] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b43ebbd-d0bc-4b41-bc45-abbd24e2ef29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.347518] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa362315-f295-468d-bf2d-7e9b49b4c66e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.356203] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d353a55f-76d4-40e1-a8bb-e53c2deeeb09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.370625] env[62974]: DEBUG nova.compute.provider_tree [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.754870] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2655002, 'name': PowerOffVM_Task, 'duration_secs': 0.338787} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.755249] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.755373] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.755631] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-566476d2-2ef6-4904-9b6e-51555c92da19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.838991] env[62974]: DEBUG nova.network.neutron [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [{"id": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "address": "fa:16:3e:8a:1f:9e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d45e949-a3", "ovs_interfaceid": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.847202] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.847422] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.847600] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleting the datastore file [datastore2] 713b503e-43b5-409c-8086-e6d36850f962 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.847862] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93eb2e39-7047-4e3d-a0ae-29ab03e0991d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.856234] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for the task: (returnval){ [ 1059.856234] env[62974]: value = "task-2655004" [ 1059.856234] env[62974]: _type = "Task" [ 1059.856234] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.866246] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2655004, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.873766] env[62974]: DEBUG nova.scheduler.client.report [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.342347] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.368084] env[62974]: DEBUG oslo_vmware.api [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Task: {'id': task-2655004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180728} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.368269] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1060.368451] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1060.368631] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1060.368801] env[62974]: INFO nova.compute.manager [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1060.369052] env[62974]: DEBUG oslo.service.loopingcall [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1060.369251] env[62974]: DEBUG nova.compute.manager [-] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1060.369348] env[62974]: DEBUG nova.network.neutron [-] [instance: 713b503e-43b5-409c-8086-e6d36850f962] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1060.379090] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.772s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.404995] env[62974]: INFO nova.scheduler.client.report [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Deleted allocations for instance 1aafddba-5da3-4c46-a537-3c178a1fec88 [ 1060.733279] env[62974]: DEBUG nova.compute.manager [req-dc08232c-0002-4482-b6cf-6820e893cf27 req-bfa10505-ab61-4fca-ab1a-2a83cbf3b1c5 service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Received event network-vif-deleted-42aa0aae-99ad-43cd-96cc-af93f45297cf {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1060.733856] env[62974]: INFO nova.compute.manager [req-dc08232c-0002-4482-b6cf-6820e893cf27 req-bfa10505-ab61-4fca-ab1a-2a83cbf3b1c5 service nova] [instance: 
713b503e-43b5-409c-8086-e6d36850f962] Neutron deleted interface 42aa0aae-99ad-43cd-96cc-af93f45297cf; detaching it from the instance and deleting it from the info cache [ 1060.734588] env[62974]: DEBUG nova.network.neutron [req-dc08232c-0002-4482-b6cf-6820e893cf27 req-bfa10505-ab61-4fca-ab1a-2a83cbf3b1c5 service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.912726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-67861784-d09f-4033-b680-ccd8c3e1250a tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "1aafddba-5da3-4c46-a537-3c178a1fec88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.743s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.215457] env[62974]: DEBUG nova.network.neutron [-] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.237579] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e36efa3-a1e0-42e1-992c-3b721b1e7496 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.251437] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b7b6d3-a77b-4806-947a-6c9c0eeb50f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.287876] env[62974]: DEBUG nova.compute.manager [req-dc08232c-0002-4482-b6cf-6820e893cf27 req-bfa10505-ab61-4fca-ab1a-2a83cbf3b1c5 service nova] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Detach interface failed, port_id=42aa0aae-99ad-43cd-96cc-af93f45297cf, reason: Instance 713b503e-43b5-409c-8086-e6d36850f962 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1061.718707] env[62974]: INFO nova.compute.manager [-] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Took 1.35 seconds to deallocate network for instance. 
[ 1061.793577] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "6d6331f3-327a-4f11-973e-37c1a3d9701c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.794148] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.794148] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "6d6331f3-327a-4f11-973e-37c1a3d9701c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.794745] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.794934] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.796894] env[62974]: INFO nova.compute.manager [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Terminating instance [ 1061.858212] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7894b5-8337-4a05-92ec-d062bb890a56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.878804] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1061.913301] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.913617] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.226551] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.226884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.227124] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.250423] env[62974]: INFO nova.scheduler.client.report [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Deleted allocations for instance 713b503e-43b5-409c-8086-e6d36850f962 [ 1062.301327] env[62974]: DEBUG nova.compute.manager [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1062.301588] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1062.303065] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc6e3b9-6569-4d15-a782-325c4cdae637 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.313337] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1062.313598] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ffe13a2-1d83-482d-b03e-017c6174375d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.321092] env[62974]: DEBUG oslo_vmware.api [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1062.321092] env[62974]: value = "task-2655005" [ 1062.321092] env[62974]: _type = "Task" [ 1062.321092] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.330388] env[62974]: DEBUG oslo_vmware.api [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2655005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.386195] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1062.386545] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-946f429d-ffff-4b40-951d-a606f85bae00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.395567] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1062.395567] env[62974]: value = "task-2655006" [ 1062.395567] env[62974]: _type = "Task" [ 1062.395567] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.406286] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655006, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.416563] env[62974]: DEBUG nova.compute.utils [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1062.761060] env[62974]: DEBUG oslo_concurrency.lockutils [None req-569233f6-c0c7-4200-9ff6-ec6c20bfdef4 tempest-DeleteServersTestJSON-729252124 tempest-DeleteServersTestJSON-729252124-project-member] Lock "713b503e-43b5-409c-8086-e6d36850f962" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.052s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.832419] env[62974]: DEBUG oslo_vmware.api [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2655005, 'name': PowerOffVM_Task, 'duration_secs': 0.237808} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.832640] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.833273] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1062.833273] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd5ab3b1-8e02-4b74-8930-0622f55394cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.906382] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655006, 'name': PowerOffVM_Task, 'duration_secs': 0.262918} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.906642] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.906847] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1062.917052] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1062.917412] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1062.917544] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Deleting the datastore file [datastore2] 6d6331f3-327a-4f11-973e-37c1a3d9701c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.917815] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8112660a-69a4-40de-aaec-d2db275d5189 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.920291] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.925762] env[62974]: DEBUG oslo_vmware.api [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for the task: (returnval){ [ 1062.925762] env[62974]: value = "task-2655008" [ 1062.925762] env[62974]: _type = "Task" [ 1062.925762] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.934906] env[62974]: DEBUG oslo_vmware.api [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2655008, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.285411] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207c091-2980-86cf-0601-83a838469f6f/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1063.286463] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ac5399-7346-414d-a2b1-ace8e725ced6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.293219] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207c091-2980-86cf-0601-83a838469f6f/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1063.293463] env[62974]: ERROR oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207c091-2980-86cf-0601-83a838469f6f/disk-0.vmdk due to incomplete transfer. [ 1063.293601] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b376bdfc-e793-4b1b-a0bf-745850a3fdfb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.301222] env[62974]: DEBUG oslo_vmware.rw_handles [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207c091-2980-86cf-0601-83a838469f6f/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1063.301429] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Uploaded image ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1063.303906] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1063.304161] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7ee41b16-2574-41a0-9f41-3a18cc232aac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.310299] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1063.310299] env[62974]: value = "task-2655010" [ 1063.310299] env[62974]: _type = "Task" [ 1063.310299] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.318496] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655010, 'name': Destroy_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.413940] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1063.414228] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.414511] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1063.414731] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.414887] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1063.415073] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1063.415291] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1063.415455] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1063.415621] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies 
{{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1063.415781] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1063.415951] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1063.421460] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17a7b23f-2746-42b9-9e54-1ae671ed1053 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.441943] env[62974]: DEBUG oslo_vmware.api [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Task: {'id': task-2655008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293203} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.443358] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1063.443560] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1063.443738] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1063.443911] env[62974]: INFO nova.compute.manager [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1063.444175] env[62974]: DEBUG oslo.service.loopingcall [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1063.444433] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1063.444433] env[62974]: value = "task-2655011" [ 1063.444433] env[62974]: _type = "Task" [ 1063.444433] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.444628] env[62974]: DEBUG nova.compute.manager [-] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1063.444721] env[62974]: DEBUG nova.network.neutron [-] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1063.456012] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655011, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.739489] env[62974]: DEBUG nova.compute.manager [req-bc045a5c-0990-426e-8245-2b0949b54cf0 req-6394828a-cbe4-4bbc-8605-71577eaad23c service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Received event network-vif-deleted-f60dfb07-76b9-46c7-b661-8dd02b1e8bb7 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1063.739699] env[62974]: INFO nova.compute.manager [req-bc045a5c-0990-426e-8245-2b0949b54cf0 req-6394828a-cbe4-4bbc-8605-71577eaad23c service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Neutron deleted interface f60dfb07-76b9-46c7-b661-8dd02b1e8bb7; detaching it from the instance and deleting it from the info cache [ 1063.739868] env[62974]: DEBUG nova.network.neutron [req-bc045a5c-0990-426e-8245-2b0949b54cf0 req-6394828a-cbe4-4bbc-8605-71577eaad23c service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.820691] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655010, 'name': Destroy_Task} progress is 33%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.958008] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655011, 'name': ReconfigVM_Task, 'duration_secs': 0.267222} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.958387] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1063.994724] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.996023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.996023] env[62974]: INFO nova.compute.manager [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Attaching volume 991a4610-c157-453f-b11c-c96faca0b73a to /dev/sdb [ 1064.025366] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893ce8f7-9fef-4405-8837-7212cdf41233 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.035579] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2f0107-ce45-43e1-b297-8f22fef18fa3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.050789] env[62974]: DEBUG nova.virt.block_device [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updating existing volume attachment record: 07ed4129-0085-4c19-8006-d6b7cfbbc120 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1064.224036] env[62974]: DEBUG nova.network.neutron [-] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.244806] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f08eb62-3b53-4af0-853f-24e533baebaa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.251967] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1892aa-5720-49e4-8f9f-05829ad3a6d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.284662] env[62974]: DEBUG nova.compute.manager 
[req-bc045a5c-0990-426e-8245-2b0949b54cf0 req-6394828a-cbe4-4bbc-8605-71577eaad23c service nova] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Detach interface failed, port_id=f60dfb07-76b9-46c7-b661-8dd02b1e8bb7, reason: Instance 6d6331f3-327a-4f11-973e-37c1a3d9701c could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1064.321539] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655010, 'name': Destroy_Task, 'duration_secs': 0.790609} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.321782] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Destroyed the VM [ 1064.322029] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1064.322288] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-72c24cda-835a-4d3a-a47f-67f43df0b63c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.331048] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1064.331048] env[62974]: value = "task-2655015" [ 1064.331048] env[62974]: _type = "Task" [ 1064.331048] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.340493] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655015, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.466175] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1064.466457] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1064.466636] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1064.466860] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1064.467055] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1064.467272] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1064.467524] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1064.467765] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1064.467949] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Got 1 possible topologies 
{{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1064.468219] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1064.468443] env[62974]: DEBUG nova.virt.hardware [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1064.474788] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1064.475111] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a1ec4ef-8f07-4b73-8e23-edd3c89e2522 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.495664] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1064.495664] env[62974]: value = "task-2655016" [ 1064.495664] env[62974]: _type = "Task" [ 1064.495664] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.504200] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655016, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.726743] env[62974]: INFO nova.compute.manager [-] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Took 1.28 seconds to deallocate network for instance. [ 1064.840617] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655015, 'name': RemoveSnapshot_Task, 'duration_secs': 0.403736} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.840876] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1064.841198] env[62974]: DEBUG nova.compute.manager [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.841953] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e121c0-06a2-4161-914d-cd6fe1282d2f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.007884] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655016, 'name': ReconfigVM_Task, 'duration_secs': 0.281459} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.008191] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1065.008954] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ba5afb-dba1-44b2-8316-a1ced5a71e41 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.032281] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-cf84a2af-6e27-461e-9af2-0471881dd540/volume-cf84a2af-6e27-461e-9af2-0471881dd540.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.032581] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdc2ac68-a7a9-4e51-abfc-e60ab2a64237 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.053010] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1065.053010] env[62974]: value = "task-2655017" [ 1065.053010] env[62974]: _type = "Task" [ 1065.053010] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.062236] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.234169] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.234450] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.234677] env[62974]: DEBUG nova.objects.instance [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lazy-loading 'resources' on Instance uuid 6d6331f3-327a-4f11-973e-37c1a3d9701c {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.353670] env[62974]: INFO nova.compute.manager [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Shelve offloading [ 1065.564229] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655017, 'name': ReconfigVM_Task, 'duration_secs': 0.264202} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.564504] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-cf84a2af-6e27-461e-9af2-0471881dd540/volume-cf84a2af-6e27-461e-9af2-0471881dd540.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.564778] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1065.856682] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1065.856985] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64b4272c-2dc2-4134-b4e6-206b6ef4202b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.864992] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1065.864992] env[62974]: value = "task-2655018" [ 1065.864992] env[62974]: _type = "Task" [ 1065.864992] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.873479] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655018, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.951415] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf431c1-4479-483a-a160-764510bc447d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.959636] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa793e21-d780-4a82-93e6-64838a31880d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.990292] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16e0ae3-a5af-43ab-b499-c92cfe6b2ef3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.997486] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbf89ca-b14d-4cc9-8afa-1244491e8878 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.010575] env[62974]: DEBUG nova.compute.provider_tree [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.073961] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e62b41b-757d-45b2-b4b9-3c069ee13c38 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.094893] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a89ea7-283f-41ee-90a1-c3dbfc27b867 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.112054] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1066.379021] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1066.379021] env[62974]: DEBUG nova.compute.manager [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.379021] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e90069c-3ec2-4e71-bf6c-2428b90cfaa5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.383836] 
env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.384192] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.384508] env[62974]: DEBUG nova.network.neutron [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.516019] env[62974]: DEBUG nova.scheduler.client.report [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.019277] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.785s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.039498] env[62974]: INFO nova.scheduler.client.report [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Deleted allocations for instance 6d6331f3-327a-4f11-973e-37c1a3d9701c [ 1067.253523] env[62974]: DEBUG nova.network.neutron [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc699c4dc-40", "ovs_interfaceid": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.550065] env[62974]: DEBUG oslo_concurrency.lockutils [None req-74b91563-850a-4af5-9323-e6d27503ff52 tempest-ServerRescueTestJSON-805941023 tempest-ServerRescueTestJSON-805941023-project-member] Lock "6d6331f3-327a-4f11-973e-37c1a3d9701c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.756s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.756031] env[62974]: DEBUG nova.network.neutron [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Port 5d45e949-a386-4bc2-a1a4-e9232bcaeeba binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1067.758380] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.104985] env[62974]: DEBUG nova.compute.manager [req-6382118c-e726-4be6-8f9a-c446e601d8ef req-f6c307cc-5165-458e-bd02-1ea607a79822 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-vif-unplugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1068.104985] env[62974]: DEBUG oslo_concurrency.lockutils [req-6382118c-e726-4be6-8f9a-c446e601d8ef req-f6c307cc-5165-458e-bd02-1ea607a79822 service nova] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.104985] env[62974]: DEBUG oslo_concurrency.lockutils [req-6382118c-e726-4be6-8f9a-c446e601d8ef req-f6c307cc-5165-458e-bd02-1ea607a79822 service nova] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.104985] env[62974]: DEBUG oslo_concurrency.lockutils [req-6382118c-e726-4be6-8f9a-c446e601d8ef req-f6c307cc-5165-458e-bd02-1ea607a79822 service nova] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 1068.105451] env[62974]: DEBUG nova.compute.manager [req-6382118c-e726-4be6-8f9a-c446e601d8ef req-f6c307cc-5165-458e-bd02-1ea607a79822 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] No waiting events found dispatching network-vif-unplugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1068.106805] env[62974]: WARNING nova.compute.manager [req-6382118c-e726-4be6-8f9a-c446e601d8ef req-f6c307cc-5165-458e-bd02-1ea607a79822 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received unexpected event network-vif-unplugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a for instance with vm_state shelved and task_state shelving_offloading. [ 1068.272257] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1068.272820] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee5f1d3-ff37-42e1-befb-42928bc7000f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.281694] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1068.281910] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75795eda-0d14-4073-a6dc-518cfdcd497c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.366302] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1068.366511] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1068.366692] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleting the datastore file [datastore1] c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.366948] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7bcb65a-1081-4762-8637-05159b54245d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.377122] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 
tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1068.377122] env[62974]: value = "task-2655020" [ 1068.377122] env[62974]: _type = "Task" [ 1068.377122] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.385839] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655020, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.785325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.785325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.785325] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.886510] env[62974]: DEBUG oslo_vmware.api [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655020, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14174} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.886862] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.887116] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.887357] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.920544] env[62974]: INFO nova.scheduler.client.report [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleted allocations for instance c90c9a6d-661f-4574-8a0d-7d8cacf8618d [ 1069.426881] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.427235] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.427465] env[62974]: DEBUG nova.objects.instance [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lazy-loading 'resources' on Instance uuid c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.823257] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.823461] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.823639] env[62974]: DEBUG nova.network.neutron [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 
tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1069.933619] env[62974]: DEBUG nova.objects.instance [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lazy-loading 'numa_topology' on Instance uuid c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.262041] env[62974]: DEBUG nova.compute.manager [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1070.262041] env[62974]: DEBUG nova.compute.manager [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing instance network info cache due to event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1070.262237] env[62974]: DEBUG oslo_concurrency.lockutils [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] Acquiring lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.262380] env[62974]: DEBUG oslo_concurrency.lockutils [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] Acquired lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.262556] env[62974]: DEBUG nova.network.neutron [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.323802] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.324234] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.436028] env[62974]: DEBUG nova.objects.base [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Object Instance lazy-loaded 
attributes: resources,numa_topology {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1070.604573] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b13c65a-9e87-471c-a20a-7c57d08f561b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.614410] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a998fe98-9bb2-4b92-8349-87a37071e9ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.647731] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbf1fee-2831-45de-af23-eebdc519ddc3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.655157] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d72a1e9-ab72-4ba8-a91d-7a6f274459e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.670193] env[62974]: DEBUG nova.compute.provider_tree [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.701019] env[62974]: DEBUG nova.network.neutron [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [{"id": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "address": "fa:16:3e:8a:1f:9e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d45e949-a3", "ovs_interfaceid": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.829124] env[62974]: DEBUG nova.compute.utils [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 
1071.175419] env[62974]: DEBUG nova.scheduler.client.report [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.203863] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.262842] env[62974]: DEBUG oslo_concurrency.lockutils [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.262842] env[62974]: DEBUG oslo_concurrency.lockutils [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.331588] env[62974]: DEBUG nova.network.neutron [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updated VIF entry in instance network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1071.332646] env[62974]: DEBUG nova.network.neutron [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc699c4dc-40", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.333412] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.683183] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.256s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.717337] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b9cb20-919f-4202-97ae-f1d8952102ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.732508] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b389f51-f2ed-4d66-9e6e-f3df1a00a554 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.766773] env[62974]: DEBUG nova.compute.utils [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1071.836245] env[62974]: DEBUG oslo_concurrency.lockutils [req-e4e09e2c-8dda-4149-b77a-1fbc2902bf56 req-4cb62879-ca13-4248-919f-a5240d99cd39 service nova] Releasing lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.193638] 
env[62974]: DEBUG oslo_concurrency.lockutils [None req-ed4cbe19-55ba-4a9f-bcde-7ba4977f83a0 tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.291s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.274011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.604862] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Volume attach. Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1072.605128] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535487', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'name': 'volume-991a4610-c157-453f-b11c-c96faca0b73a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4de11643-da0a-453f-b03e-ca19819f4f06', 'attached_at': '', 'detached_at': '', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'serial': '991a4610-c157-453f-b11c-c96faca0b73a'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1072.606117] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63470b25-c954-4b35-89fa-54e187b35b75 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.622740] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4f2159-aef5-4ae1-b8b7-cf1ccc311647 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.648475] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] volume-991a4610-c157-453f-b11c-c96faca0b73a/volume-991a4610-c157-453f-b11c-c96faca0b73a.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1072.648805] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4676c18-5e00-4aa0-9f0e-0026b0d523a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.670958] env[62974]: 
DEBUG oslo_vmware.api [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1072.670958] env[62974]: value = "task-2655022" [ 1072.670958] env[62974]: _type = "Task" [ 1072.670958] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.680111] env[62974]: DEBUG oslo_vmware.api [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655022, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.862607] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19365827-7593-4e02-afe1-73b6863ca7f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.887368] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a88356-3c6d-4b1c-bfb5-db22d0853992 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.890571] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.890801] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.891028] env[62974]: INFO nova.compute.manager [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Attaching volume e8b7a0d1-e9c7-4352-9452-ec4d12c598bb to /dev/sdb [ 1072.897182] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1072.930925] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415397dd-999f-4e7f-a4be-68e81ff4cac9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.937918] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39f8d04-4344-4f0d-80e7-beb0b7c7be86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.952635] 
env[62974]: DEBUG nova.virt.block_device [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating existing volume attachment record: a3340023-d589-4d72-bce2-a4aa67e5cd64 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1073.180912] env[62974]: DEBUG oslo_vmware.api [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655022, 'name': ReconfigVM_Task, 'duration_secs': 0.34509} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.181208] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Reconfigured VM instance instance-00000063 to attach disk [datastore2] volume-991a4610-c157-453f-b11c-c96faca0b73a/volume-991a4610-c157-453f-b11c-c96faca0b73a.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1073.185964] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5055140-2af7-4087-9735-6d64c70d58de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.202029] env[62974]: DEBUG oslo_vmware.api [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1073.202029] env[62974]: value = "task-2655024" [ 1073.202029] env[62974]: _type = "Task" [ 1073.202029] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.208664] env[62974]: DEBUG oslo_vmware.api [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655024, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.349734] env[62974]: DEBUG oslo_concurrency.lockutils [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.349836] env[62974]: DEBUG oslo_concurrency.lockutils [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.350022] env[62974]: INFO nova.compute.manager [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Attaching volume 9ec1224e-9aca-4ae8-9379-33e489d62a82 to /dev/sdb [ 1073.383416] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357d3f27-3ef5-42e4-85c7-c7e7cf763e31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.390360] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4575fa-3553-40eb-bf5e-b4266a6f742a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.406051] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1073.406439] env[62974]: DEBUG nova.virt.block_device [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updating existing volume attachment record: 11ade9e6-6ca3-4fe4-b1f5-aee23ac77c20 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1073.408729] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-453b48d7-b365-4aaf-a73a-752f747d19d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.415205] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1073.415205] env[62974]: value = "task-2655026" [ 1073.415205] env[62974]: _type = "Task" [ 1073.415205] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.427791] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655026, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.540929] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "b2d46229-31a9-4be1-bd17-5411deb4944c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.541175] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.585223] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.585223] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.585415] env[62974]: INFO nova.compute.manager [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Unshelving [ 1073.711707] env[62974]: DEBUG oslo_vmware.api [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655024, 'name': ReconfigVM_Task, 'duration_secs': 0.133168} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.712032] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535487', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'name': 'volume-991a4610-c157-453f-b11c-c96faca0b73a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4de11643-da0a-453f-b03e-ca19819f4f06', 'attached_at': '', 'detached_at': '', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'serial': '991a4610-c157-453f-b11c-c96faca0b73a'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1073.924576] env[62974]: DEBUG oslo_vmware.api [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655026, 'name': PowerOnVM_Task, 'duration_secs': 0.419674} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.924909] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.925777] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49a9fd-d6a6-48fe-bdda-5f6ef962fe94 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1074.044066] env[62974]: DEBUG nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1074.566186] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.566457] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.567952] env[62974]: INFO nova.compute.claims [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.607635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.721982] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186cf858-6a3b-48b5-bd33-ac6df22e4598 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.729413] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ccdb34-861c-42c5-85a1-dd6a0a49b887 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.760969] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6d1380-f8ad-4db3-8c71-7dfa3a2c5bb2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.768173] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33cc309-842b-431c-b044-247249fe42b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.781633] env[62974]: DEBUG nova.compute.provider_tree [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.285630] env[62974]: DEBUG nova.scheduler.client.report [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1076.792337] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.792790] env[62974]: DEBUG nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1076.795531] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.188s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.795740] env[62974]: DEBUG nova.objects.instance [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lazy-loading 'pci_requests' on Instance uuid c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.212475] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.212717] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.212884] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1077.213012] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Rebuilding the list of instances to heal {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1077.298693] env[62974]: DEBUG nova.compute.utils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1077.300560] env[62974]: DEBUG nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Allocating IP 
information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1077.300742] env[62974]: DEBUG nova.network.neutron [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1077.303842] env[62974]: DEBUG nova.objects.instance [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lazy-loading 'numa_topology' on Instance uuid c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.339152] env[62974]: DEBUG nova.policy [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '470755cebe0c479aa83ee615c853f7a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ab55aed0c6c473c8f419c00ec3a555d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1077.592846] env[62974]: DEBUG nova.network.neutron [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Successfully created port: 9d3a88c4-e165-4874-8774-791991bbb3bb {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.717893] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Skipping network cache update for instance because it is Building. 
{{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1077.755990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.756169] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.756316] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1077.756466] env[62974]: DEBUG nova.objects.instance [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lazy-loading 'info_cache' on Instance uuid cf73422d-7f4b-4bae-9d69-de74d7211243 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.806422] env[62974]: DEBUG nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1077.808797] env[62974]: INFO nova.compute.claims [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.962055] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1077.962323] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535490', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'name': 'volume-9ec1224e-9aca-4ae8-9379-33e489d62a82', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'serial': '9ec1224e-9aca-4ae8-9379-33e489d62a82'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1077.963334] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dcc142-8b93-4e93-8b47-cb2bdd93a45e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.980190] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d27342-cb1b-49bb-998e-548992dc9d56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.005357] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-9ec1224e-9aca-4ae8-9379-33e489d62a82/volume-9ec1224e-9aca-4ae8-9379-33e489d62a82.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.006461] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1078.006689] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535489', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'name': 'volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '220295bf-b021-4800-bc7e-a3dd311c747a', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'serial': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1078.006913] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e4f7a2b-b1b0-4604-aac5-e64ebf37e226 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.020044] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503a9597-6595-4493-b364-9f91d6634000 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.038622] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6fce34-7d49-4578-8dc8-38b1c18e4463 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.041203] env[62974]: DEBUG oslo_vmware.api [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1078.041203] env[62974]: value = "task-2655031" [ 1078.041203] env[62974]: _type = "Task" [ 1078.041203] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.066391] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb/volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.069518] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58d9a3f2-a8cc-40fe-90df-b79a5989b8a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.090209] env[62974]: DEBUG oslo_vmware.api [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655031, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.096911] env[62974]: DEBUG oslo_vmware.api [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1078.096911] env[62974]: value = "task-2655032" [ 1078.096911] env[62974]: _type = "Task" [ 1078.096911] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.105320] env[62974]: DEBUG oslo_vmware.api [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655032, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.543293] env[62974]: DEBUG nova.objects.instance [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'flavor' on Instance uuid 4de11643-da0a-453f-b03e-ca19819f4f06 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.553586] env[62974]: DEBUG oslo_vmware.api [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655031, 'name': ReconfigVM_Task, 'duration_secs': 0.371498} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.553884] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-9ec1224e-9aca-4ae8-9379-33e489d62a82/volume-9ec1224e-9aca-4ae8-9379-33e489d62a82.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1078.559060] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8e0d98c-dd8a-4e0e-8692-8cb65358a15b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.573716] env[62974]: DEBUG oslo_vmware.api [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1078.573716] env[62974]: value = "task-2655033" [ 1078.573716] env[62974]: _type = "Task" [ 1078.573716] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.581643] env[62974]: DEBUG oslo_vmware.api [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655033, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.608426] env[62974]: DEBUG oslo_vmware.api [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655032, 'name': ReconfigVM_Task, 'duration_secs': 0.363777} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.608705] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfigured VM instance instance-00000066 to attach disk [datastore2] volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb/volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1078.613359] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-499e2d3d-e509-4faa-a797-21d7f24270b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.629981] env[62974]: DEBUG oslo_vmware.api [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1078.629981] env[62974]: value = "task-2655034" [ 1078.629981] env[62974]: _type = "Task" [ 1078.629981] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.637995] env[62974]: DEBUG oslo_vmware.api [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655034, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.824299] env[62974]: DEBUG nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1078.852371] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.852614] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.852815] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.852960] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.853123] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.853277] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.853489] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.853651] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.853932] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 
tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.853999] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.854195] env[62974]: DEBUG nova.virt.hardware [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.854980] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840f48a8-5864-406c-80bb-0cfb250b9ff5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.866542] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f7a558-17b4-43bd-aa1a-27fd1d8279e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.954122] env[62974]: DEBUG nova.compute.manager [req-7721a3ea-d579-40d8-98fb-479f298eb901 req-fcd10858-83a1-4967-bf92-2af36e9c5d0d service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Received event network-vif-plugged-9d3a88c4-e165-4874-8774-791991bbb3bb {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1078.954437] env[62974]: DEBUG oslo_concurrency.lockutils [req-7721a3ea-d579-40d8-98fb-479f298eb901 req-fcd10858-83a1-4967-bf92-2af36e9c5d0d service nova] Acquiring lock "b2d46229-31a9-4be1-bd17-5411deb4944c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.954534] env[62974]: DEBUG oslo_concurrency.lockutils [req-7721a3ea-d579-40d8-98fb-479f298eb901 req-fcd10858-83a1-4967-bf92-2af36e9c5d0d service nova] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.954701] env[62974]: DEBUG oslo_concurrency.lockutils [req-7721a3ea-d579-40d8-98fb-479f298eb901 req-fcd10858-83a1-4967-bf92-2af36e9c5d0d service nova] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.954865] env[62974]: DEBUG nova.compute.manager [req-7721a3ea-d579-40d8-98fb-479f298eb901 req-fcd10858-83a1-4967-bf92-2af36e9c5d0d service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] No waiting events found dispatching network-vif-plugged-9d3a88c4-e165-4874-8774-791991bbb3bb {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1078.955266] env[62974]: WARNING nova.compute.manager [req-7721a3ea-d579-40d8-98fb-479f298eb901 req-fcd10858-83a1-4967-bf92-2af36e9c5d0d service nova] 
[instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Received unexpected event network-vif-plugged-9d3a88c4-e165-4874-8774-791991bbb3bb for instance with vm_state building and task_state spawning. [ 1078.978537] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979734ba-878f-4a87-bcf3-58dac430f102 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.985612] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb57af58-9b2c-460f-8c9c-6bdf32062768 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.015320] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb38639-f4b7-4a5d-9ff2-6ace16c8cd51 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.022676] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e1c807-1eee-437e-9257-73452c1a7303 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.035524] env[62974]: DEBUG nova.compute.provider_tree [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.048257] env[62974]: DEBUG nova.network.neutron [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Successfully updated port: 9d3a88c4-e165-4874-8774-791991bbb3bb {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.054064] env[62974]: DEBUG oslo_concurrency.lockutils [None req-36e97553-e56f-48f4-8a01-fe626d06f4bc tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 15.059s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.083668] env[62974]: DEBUG oslo_vmware.api [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655033, 'name': ReconfigVM_Task, 'duration_secs': 0.137086} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.083852] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535490', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'name': 'volume-9ec1224e-9aca-4ae8-9379-33e489d62a82', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'serial': '9ec1224e-9aca-4ae8-9379-33e489d62a82'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1079.141512] env[62974]: DEBUG oslo_vmware.api [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655034, 'name': ReconfigVM_Task, 'duration_secs': 0.136063} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.141866] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535489', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'name': 'volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '220295bf-b021-4800-bc7e-a3dd311c747a', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'serial': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1079.491955] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [{"id": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "address": "fa:16:3e:34:87:aa", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e4b21ba-e0", "ovs_interfaceid": "7e4b21ba-e0f2-4104-8f46-57871fd6ed16", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.540117] env[62974]: DEBUG nova.scheduler.client.report [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.552082] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "refresh_cache-b2d46229-31a9-4be1-bd17-5411deb4944c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.552230] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquired lock "refresh_cache-b2d46229-31a9-4be1-bd17-5411deb4944c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.552358] env[62974]: DEBUG nova.network.neutron [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.805265] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.805552] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.805746] env[62974]: DEBUG nova.compute.manager [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Going to confirm migration 7 {{(pid=62974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1079.994948] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-cf73422d-7f4b-4bae-9d69-de74d7211243" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.995257] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1079.995506] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.995775] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.995956] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.996129] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.996276] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.996421] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.996549] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1079.996741] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.044830] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.249s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.101188] env[62974]: DEBUG nova.network.neutron [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1080.108956] env[62974]: INFO nova.network.neutron [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1080.124695] env[62974]: DEBUG nova.objects.instance [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid a44cca2f-9286-490a-9013-1fea30984fa5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.180662] env[62974]: DEBUG nova.objects.instance [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'flavor' on Instance uuid 220295bf-b021-4800-bc7e-a3dd311c747a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.400020] env[62974]: DEBUG nova.network.neutron [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Updating instance_info_cache with network_info: [{"id": "9d3a88c4-e165-4874-8774-791991bbb3bb", "address": "fa:16:3e:ca:ac:c8", "network": {"id": "b2d7c57b-653c-4968-b238-d96d54c4967a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-286216878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ab55aed0c6c473c8f419c00ec3a555d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3a88c4-e1", "ovs_interfaceid": "9d3a88c4-e165-4874-8774-791991bbb3bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.444448] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.444793] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquired lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.445115] env[62974]: DEBUG 
nova.network.neutron [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.445429] env[62974]: DEBUG nova.objects.instance [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'info_cache' on Instance uuid c3801f86-5aaa-42cd-a6b2-1b72b77aa74c {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.504019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.504019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.504019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.504019] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1080.504019] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f07bbab-af4e-4809-ad5e-2d412a1234e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.513701] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d187d03b-14a6-491e-8a5a-cc094b2502ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.538322] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe82b48-ab6d-473b-8e6f-c434515583ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.546416] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78008516-ef1b-4e96-beac-c052a2bd1bd7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.593081] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179610MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1080.593351] 
env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.593638] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.630550] env[62974]: DEBUG oslo_concurrency.lockutils [None req-24107c35-eb75-41af-b6b3-7de544a36188 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.281s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.685372] env[62974]: DEBUG oslo_concurrency.lockutils [None req-e03e8f10-f04d-496c-ac2d-ffe3dca696bc tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.794s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.781076] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "2a962aab-3057-43df-97f7-b63ce808fb90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.781297] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "2a962aab-3057-43df-97f7-b63ce808fb90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.904545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Releasing lock "refresh_cache-b2d46229-31a9-4be1-bd17-5411deb4944c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.904824] env[62974]: DEBUG nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Instance network_info: |[{"id": "9d3a88c4-e165-4874-8774-791991bbb3bb", "address": "fa:16:3e:ca:ac:c8", "network": {"id": "b2d7c57b-653c-4968-b238-d96d54c4967a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-286216878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ab55aed0c6c473c8f419c00ec3a555d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3a88c4-e1", "ovs_interfaceid": "9d3a88c4-e165-4874-8774-791991bbb3bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1080.905281] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:ac:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d3a88c4-e165-4874-8774-791991bbb3bb', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.913586] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Creating folder: Project (8ab55aed0c6c473c8f419c00ec3a555d). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.914217] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-076ac4ff-85f5-4a3f-9a96-726f7a8168ae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.925422] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Created folder: Project (8ab55aed0c6c473c8f419c00ec3a555d) in parent group-v535199. [ 1080.925600] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Creating folder: Instances. Parent ref: group-v535491. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.925835] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd449ac8-8f4e-4747-9245-5a8b998aa567 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.935750] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Created folder: Instances in parent group-v535491. 
[ 1080.935750] env[62974]: DEBUG oslo.service.loopingcall [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1080.935750] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.935985] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcb02476-21bc-4fc8-9f76-33bc2321a1eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.957115] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.957115] env[62974]: value = "task-2655037" [ 1080.957115] env[62974]: _type = "Task" [ 1080.957115] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.967154] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655037, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.013947] env[62974]: DEBUG nova.compute.manager [req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Received event network-changed-9d3a88c4-e165-4874-8774-791991bbb3bb {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1081.014227] env[62974]: DEBUG nova.compute.manager [req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Refreshing instance network info cache due to event network-changed-9d3a88c4-e165-4874-8774-791991bbb3bb. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1081.014400] env[62974]: DEBUG oslo_concurrency.lockutils [req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] Acquiring lock "refresh_cache-b2d46229-31a9-4be1-bd17-5411deb4944c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.014556] env[62974]: DEBUG oslo_concurrency.lockutils [req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] Acquired lock "refresh_cache-b2d46229-31a9-4be1-bd17-5411deb4944c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.014758] env[62974]: DEBUG nova.network.neutron [req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Refreshing network info cache for port 9d3a88c4-e165-4874-8774-791991bbb3bb {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.283775] env[62974]: DEBUG nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1081.469624] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655037, 'name': CreateVM_Task, 'duration_secs': 0.4383} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.470321] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1081.471114] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.471346] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.471715] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1081.472282] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af1de586-457e-4ccf-a5de-870b5cbaee4f tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.472477] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af1de586-457e-4ccf-a5de-870b5cbaee4f tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.472805] env[62974]: DEBUG nova.objects.instance [None req-af1de586-457e-4ccf-a5de-870b5cbaee4f tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'flavor' on Instance uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.473884] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d511b63-925b-4677-a4e6-83bf0a5fa188 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.480090] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: 
(returnval){ [ 1081.480090] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bd03d1-bc02-5791-9aa1-d5ca38c51c83" [ 1081.480090] env[62974]: _type = "Task" [ 1081.480090] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.482917] env[62974]: DEBUG nova.compute.manager [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1081.497791] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bd03d1-bc02-5791-9aa1-d5ca38c51c83, 'name': SearchDatastore_Task, 'duration_secs': 0.012121} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.498204] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.498325] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.498542] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.498688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.498866] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.499197] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89514cfd-cecd-465d-908d-5d9e4059bcc7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.507607] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 
tempest-ServerTagsTestJSON-2035732421-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.507790] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1081.508542] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-931283fb-a138-490f-8672-f6149041a400 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.513876] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1081.513876] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52be3534-afd8-a64f-6086-135f46e726b8" [ 1081.513876] env[62974]: _type = "Task" [ 1081.513876] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.523514] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52be3534-afd8-a64f-6086-135f46e726b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.603682] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Applying migration context for instance c3801f86-5aaa-42cd-a6b2-1b72b77aa74c as it has an incoming, in-progress migration 9e89606c-d22f-4fbc-9789-776bf08c4296. Migration status is confirming {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1081.605080] env[62974]: INFO nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating resource usage from migration 1a61954c-c534-4854-94df-272a36bdfb72 [ 1081.605232] env[62974]: INFO nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating resource usage from migration 9e89606c-d22f-4fbc-9789-776bf08c4296 [ 1081.627869] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cf73422d-7f4b-4bae-9d69-de74d7211243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.628043] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 514e0f15-f27d-4fab-9107-b92884075420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.628191] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance dca952df-dac9-4502-948b-24ac6fb939f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.628362] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 11bd6a5d-9590-4aa3-aaf3-99d2ac394553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.628670] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 4de11643-da0a-453f-b03e-ca19819f4f06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.628670] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a44cca2f-9286-490a-9013-1fea30984fa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.628822] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 72b0b643-7747-4dae-9d85-c8c6a573ce07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.628896] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Migration 9e89606c-d22f-4fbc-9789-776bf08c4296 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1081.629009] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c3801f86-5aaa-42cd-a6b2-1b72b77aa74c actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.629123] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c90c9a6d-661f-4574-8a0d-7d8cacf8618d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.629230] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance b2d46229-31a9-4be1-bd17-5411deb4944c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1081.695491] env[62974]: DEBUG oslo_concurrency.lockutils [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.695726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.805427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.857236] env[62974]: DEBUG nova.network.neutron [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [{"id": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "address": "fa:16:3e:8a:1f:9e", "network": {"id": "152ae676-4f24-4613-95a2-646be368a6c4", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2089459280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9087d01b1ad748e0a66474953dfe7034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d45e949-a3", "ovs_interfaceid": "5d45e949-a386-4bc2-a1a4-e9232bcaeeba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.916035] env[62974]: DEBUG nova.network.neutron 
[req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Updated VIF entry in instance network info cache for port 9d3a88c4-e165-4874-8774-791991bbb3bb. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1081.916409] env[62974]: DEBUG nova.network.neutron [req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Updating instance_info_cache with network_info: [{"id": "9d3a88c4-e165-4874-8774-791991bbb3bb", "address": "fa:16:3e:ca:ac:c8", "network": {"id": "b2d7c57b-653c-4968-b238-d96d54c4967a", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-286216878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ab55aed0c6c473c8f419c00ec3a555d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3a88c4-e1", "ovs_interfaceid": "9d3a88c4-e165-4874-8774-791991bbb3bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.964235] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "dca952df-dac9-4502-948b-24ac6fb939f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.965031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "dca952df-dac9-4502-948b-24ac6fb939f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.965031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "dca952df-dac9-4502-948b-24ac6fb939f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.965031] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "dca952df-dac9-4502-948b-24ac6fb939f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.965207] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "dca952df-dac9-4502-948b-24ac6fb939f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.967219] env[62974]: INFO nova.compute.manager [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Terminating instance [ 1081.977574] env[62974]: DEBUG nova.objects.instance [None req-af1de586-457e-4ccf-a5de-870b5cbaee4f tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'pci_requests' on Instance uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.994970] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.995218] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.995474] env[62974]: DEBUG nova.network.neutron [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.006236] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.025081] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52be3534-afd8-a64f-6086-135f46e726b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009988} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.025857] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebeec7ff-9063-45f9-b3c4-39e8191907bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.030627] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1082.030627] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b3aad8-8de9-4df1-4eea-6a07eb026c04" [ 1082.030627] env[62974]: _type = "Task" [ 1082.030627] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.037989] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b3aad8-8de9-4df1-4eea-6a07eb026c04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.132804] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 2a962aab-3057-43df-97f7-b63ce808fb90 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1082.133147] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Migration 1a61954c-c534-4854-94df-272a36bdfb72 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1082.133319] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 220295bf-b021-4800-bc7e-a3dd311c747a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1082.133570] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1082.133720] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1082.198806] env[62974]: DEBUG nova.compute.utils [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1082.314972] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17b0eb0-57ab-4538-bcf3-06f0d740b494 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.322572] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7f1eac-8118-4813-9161-3a66a3622bd1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.351746] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc03dc9c-1abb-4ccf-977a-6d5f204a78e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.358992] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af74c4a6-51e9-4e98-90c7-319136860951 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.364440] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Releasing lock "refresh_cache-c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.364680] env[62974]: DEBUG nova.objects.instance [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'migration_context' on Instance uuid c3801f86-5aaa-42cd-a6b2-1b72b77aa74c {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.374828] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.419145] env[62974]: DEBUG oslo_concurrency.lockutils [req-26f4fb52-874f-47b0-9300-0b2f3e8985fd req-8de66e6f-6fd6-48e4-b971-532024ec3add service nova] Releasing lock "refresh_cache-b2d46229-31a9-4be1-bd17-5411deb4944c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1082.470810] env[62974]: DEBUG nova.compute.manager [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1082.471070] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1082.471954] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c32f349-e525-4483-8033-58cf79c84b48 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.479879] env[62974]: DEBUG nova.objects.base [None req-af1de586-457e-4ccf-a5de-870b5cbaee4f tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Object Instance<72b0b643-7747-4dae-9d85-c8c6a573ce07> lazy-loaded attributes: flavor,pci_requests {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1082.480092] env[62974]: DEBUG nova.network.neutron [None req-af1de586-457e-4ccf-a5de-870b5cbaee4f tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1082.481907] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1082.482155] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a559df01-07b5-40d3-b723-06f7edaf2070 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.488802] env[62974]: DEBUG oslo_vmware.api [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1082.488802] env[62974]: value = "task-2655038" [ 1082.488802] env[62974]: _type = "Task" [ 1082.488802] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.496390] env[62974]: DEBUG oslo_vmware.api [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655038, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.540776] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b3aad8-8de9-4df1-4eea-6a07eb026c04, 'name': SearchDatastore_Task, 'duration_secs': 0.009057} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.543142] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.543494] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] b2d46229-31a9-4be1-bd17-5411deb4944c/b2d46229-31a9-4be1-bd17-5411deb4944c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.543868] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8372a81-e5a6-4922-8f6f-96af9c6b7010 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.550373] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1082.550373] env[62974]: value = "task-2655039" [ 1082.550373] env[62974]: _type = "Task" [ 1082.550373] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.558154] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655039, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.603984] env[62974]: DEBUG oslo_concurrency.lockutils [None req-af1de586-457e-4ccf-a5de-870b5cbaee4f tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.131s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.702562] env[62974]: DEBUG oslo_concurrency.lockutils [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.759384] env[62974]: DEBUG nova.network.neutron [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc699c4dc-40", "ovs_interfaceid": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.868090] env[62974]: DEBUG nova.objects.base [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1082.869094] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4628429-1266-4097-8239-8a5ed8fc6e65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.888838] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.893131] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-696a72c5-e9f9-462d-80af-67ffb0989729 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.898905] env[62974]: DEBUG oslo_vmware.api [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1082.898905] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52815f8e-78a8-d5d6-2f70-1c587560d75c" [ 1082.898905] env[62974]: _type = "Task" [ 1082.898905] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.907513] env[62974]: DEBUG oslo_vmware.api [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52815f8e-78a8-d5d6-2f70-1c587560d75c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.998981] env[62974]: DEBUG oslo_vmware.api [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655038, 'name': PowerOffVM_Task, 'duration_secs': 0.380641} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.999284] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.999463] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.999723] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be5a13ad-c1ad-4c03-9e7f-bc94949fa658 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.060310] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478572} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.060695] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] b2d46229-31a9-4be1-bd17-5411deb4944c/b2d46229-31a9-4be1-bd17-5411deb4944c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.060794] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.061046] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3db3477a-0eed-496d-bbc4-281727c30567 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.067051] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1083.067051] env[62974]: value = "task-2655041" [ 1083.067051] env[62974]: _type = "Task" [ 1083.067051] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.076224] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655041, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.262819] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.291350] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='72d9a9e6d41f2d919c0d6c3e971a3f86',container_format='bare',created_at=2025-02-19T03:59:10Z,direct_url=,disk_format='vmdk',id=ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2029616813-shelved',owner='28bc1945aba64a2ea67745b0d417b9ef',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-02-19T03:59:26Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.291544] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.291719] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.291892] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.292048] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.292199] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.292437] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.292554] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.292719] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.292906] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.293070] env[62974]: DEBUG nova.virt.hardware [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.294156] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d48c8c-09cd-44d0-8691-2a6165222d46 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.302442] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee637908-e245-49ff-a68b-fb51c5da8dc2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.315981] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:c2:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c699c4dc-40cf-4eaa-9ba6-5e255a43e01a', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.323322] env[62974]: DEBUG oslo.service.loopingcall [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.323560] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.323766] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7808e5b9-24a0-478e-83e4-8acbf5fc3119 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.343387] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.343387] env[62974]: value = "task-2655042" [ 1083.343387] env[62974]: _type = "Task" [ 1083.343387] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.350356] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655042, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.395099] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1083.395290] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.802s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.395666] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.590s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.397575] env[62974]: INFO nova.compute.claims [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.400312] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.400586] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Cleaning up deleted instances with incomplete migration {{(pid=62974) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1083.414566] env[62974]: DEBUG oslo_vmware.api [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52815f8e-78a8-d5d6-2f70-1c587560d75c, 'name': SearchDatastore_Task, 'duration_secs': 0.028489} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.414861] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.577618] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055121} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.577883] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.578657] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99886008-5aea-4de4-a367-00544b80b37c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.599745] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] b2d46229-31a9-4be1-bd17-5411deb4944c/b2d46229-31a9-4be1-bd17-5411deb4944c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.600020] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f8ad5bc-2e67-4953-95ec-f2e27283803f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.622198] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1083.622198] env[62974]: value = "task-2655043" [ 1083.622198] env[62974]: _type = "Task" [ 1083.622198] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.633465] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655043, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.758232] env[62974]: DEBUG oslo_concurrency.lockutils [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.758519] env[62974]: DEBUG oslo_concurrency.lockutils [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.758765] env[62974]: INFO nova.compute.manager [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Attaching volume a6000f09-20a7-4f82-af23-96b01a5803f7 to /dev/sdc [ 1083.783756] env[62974]: DEBUG nova.compute.manager [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-vif-plugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1083.784032] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.784228] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.784397] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.784561] env[62974]: DEBUG nova.compute.manager [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] No waiting events found dispatching network-vif-plugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1083.784726] env[62974]: WARNING nova.compute.manager [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received unexpected event network-vif-plugged-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a for instance with 
vm_state shelved_offloaded and task_state spawning. [ 1083.784882] env[62974]: DEBUG nova.compute.manager [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1083.785048] env[62974]: DEBUG nova.compute.manager [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing instance network info cache due to event network-changed-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1083.785231] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] Acquiring lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.785364] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] Acquired lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.785514] env[62974]: DEBUG nova.network.neutron [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Refreshing network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.791188] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11319ae2-c100-44f9-b1e2-ee816487f877 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.800936] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb701b60-05f3-417c-a6fa-13de77d8c570 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.815207] env[62974]: DEBUG nova.virt.block_device [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updating existing volume attachment record: 34d75f72-a3b2-4fbb-96f2-8d4aa65dfa4c {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1083.851644] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655042, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.007348] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1084.132591] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655043, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.352691] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655042, 'name': CreateVM_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.511372] env[62974]: DEBUG nova.network.neutron [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updated VIF entry in instance network info cache for port c699c4dc-40cf-4eaa-9ba6-5e255a43e01a. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.511988] env[62974]: DEBUG nova.network.neutron [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [{"id": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "address": "fa:16:3e:06:c2:5a", "network": {"id": "fe2da798-20a5-4c0e-b58e-085c014fc044", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-354302581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28bc1945aba64a2ea67745b0d417b9ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc699c4dc-40", "ovs_interfaceid": "c699c4dc-40cf-4eaa-9ba6-5e255a43e01a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.513174] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.513396] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 
tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.513729] env[62974]: DEBUG nova.objects.instance [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'flavor' on Instance uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.590406] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612385ba-564d-4626-9dc1-56b26997c619 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.598454] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee97f468-ce33-4565-8a51-2e181ac2cffc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.634717] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf0295d-22c3-49f6-b249-e0a29238b8fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.642279] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655043, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.645246] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ea8982-95ca-46ef-a6c0-a67298b5092f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.658805] env[62974]: DEBUG nova.compute.provider_tree [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.799104] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1084.799338] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1084.799522] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleting the datastore file [datastore1] dca952df-dac9-4502-948b-24ac6fb939f9 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1084.799792] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af278b7b-6381-4e4f-8f2d-113cc3c1b1b5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.805565] env[62974]: DEBUG oslo_vmware.api [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1084.805565] env[62974]: value = "task-2655045" [ 1084.805565] env[62974]: _type = "Task" [ 1084.805565] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.812803] env[62974]: DEBUG oslo_vmware.api [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.852356] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655042, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.016067] env[62974]: DEBUG oslo_concurrency.lockutils [req-e1c83714-fa9d-4ed1-aaf3-c2d1b3576c55 req-402d1ae5-21ff-458b-bd33-3d5eeabf4904 service nova] Releasing lock "refresh_cache-c90c9a6d-661f-4574-8a0d-7d8cacf8618d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.083181] env[62974]: DEBUG nova.objects.instance [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'pci_requests' on Instance uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.141604] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655043, 'name': ReconfigVM_Task, 'duration_secs': 1.326962} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.142020] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Reconfigured VM instance instance-0000006b to attach disk [datastore2] b2d46229-31a9-4be1-bd17-5411deb4944c/b2d46229-31a9-4be1-bd17-5411deb4944c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.142504] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8337cf2d-4de9-4d65-8b4e-b03e9711866d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.148454] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1085.148454] env[62974]: value = "task-2655046" [ 1085.148454] env[62974]: _type = "Task" [ 1085.148454] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.156823] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655046, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.161799] env[62974]: DEBUG nova.scheduler.client.report [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.314802] env[62974]: DEBUG oslo_vmware.api [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139774} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.315016] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.315297] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1085.315482] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1085.315656] env[62974]: INFO nova.compute.manager [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Took 2.84 seconds to destroy the instance on the hypervisor. [ 1085.315894] env[62974]: DEBUG oslo.service.loopingcall [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1085.316095] env[62974]: DEBUG nova.compute.manager [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1085.316195] env[62974]: DEBUG nova.network.neutron [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1085.353837] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655042, 'name': CreateVM_Task, 'duration_secs': 1.683949} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.354057] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1085.354728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.354929] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.355340] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1085.355628] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9583b29d-c3fe-4193-86e4-96256f076608 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.359839] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1085.359839] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd8d15-6fa3-b095-2e8f-8416a80452e6" [ 1085.359839] env[62974]: _type = "Task" [ 1085.359839] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.367079] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dd8d15-6fa3-b095-2e8f-8416a80452e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.586045] env[62974]: DEBUG nova.objects.base [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Object Instance<72b0b643-7747-4dae-9d85-c8c6a573ce07> lazy-loaded attributes: flavor,pci_requests {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1085.586198] env[62974]: DEBUG nova.network.neutron [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1085.644208] env[62974]: DEBUG nova.policy [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1085.659410] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655046, 'name': Rename_Task, 'duration_secs': 0.13655} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.659690] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.660090] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24476a6b-6a63-4d42-9390-cc939ebe7957 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.666358] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.666903] env[62974]: DEBUG nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1085.670180] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1085.670180] env[62974]: value = "task-2655047" [ 1085.670180] env[62974]: _type = "Task" [ 1085.670180] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.670500] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.664s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.680849] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655047, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.871534] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.872152] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Processing image ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1085.872152] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.872152] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.872394] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1085.872551] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-7656835e-a647-4379-a680-26eda3916235 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.882674] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1085.882818] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1085.883680] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e2b9134-6897-4eec-a984-ade54c340d86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.889978] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1085.889978] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b35a7c-a3a2-96cc-8568-26b3ba512218" [ 1085.889978] env[62974]: _type = "Task" [ 1085.889978] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.897131] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b35a7c-a3a2-96cc-8568-26b3ba512218, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.945576] env[62974]: DEBUG nova.compute.manager [req-836eb3c4-fe0c-4f93-84e5-deb414d3b9ec req-64a166fc-b4ea-413c-b879-75358b28ebf8 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Received event network-vif-deleted-a6e1899a-69c5-486d-bfb2-a2f12c06e8ac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1085.945810] env[62974]: INFO nova.compute.manager [req-836eb3c4-fe0c-4f93-84e5-deb414d3b9ec req-64a166fc-b4ea-413c-b879-75358b28ebf8 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Neutron deleted interface a6e1899a-69c5-486d-bfb2-a2f12c06e8ac; detaching it from the instance and deleting it from the info cache [ 1085.946011] env[62974]: DEBUG nova.network.neutron [req-836eb3c4-fe0c-4f93-84e5-deb414d3b9ec req-64a166fc-b4ea-413c-b879-75358b28ebf8 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.002806] env[62974]: DEBUG nova.network.neutron [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Successfully created port: 30b29e6f-4b73-4bb9-9a84-6526189297a0 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.172130] env[62974]: DEBUG nova.compute.utils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1086.173671] env[62974]: DEBUG nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1086.173888] env[62974]: DEBUG nova.network.neutron [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1086.178125] env[62974]: INFO nova.compute.claims [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1086.192720] env[62974]: DEBUG oslo_vmware.api [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655047, 'name': PowerOnVM_Task, 'duration_secs': 0.445031} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.192720] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1086.192720] env[62974]: INFO nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Took 7.37 seconds to spawn the instance on the hypervisor. [ 1086.192720] env[62974]: DEBUG nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1086.193105] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6b2edc-9980-4435-bd03-6be5d17405e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.242334] env[62974]: DEBUG nova.policy [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c69e5ea97264d57978ddcb94ef4bc41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43dc876c8a2346c7bca249407fb7fed8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1086.337813] env[62974]: DEBUG nova.network.neutron [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.401491] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Preparing fetch location {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1086.401837] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Fetch image to [datastore2] OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6/OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6.vmdk {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1086.402053] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Downloading stream optimized image ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a to 
[datastore2] OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6/OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6.vmdk on the data store datastore2 as vApp {{(pid=62974) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1086.402242] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Downloading image file data ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a to the ESX as VM named 'OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6' {{(pid=62974) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1086.450539] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f16d529b-e4aa-45a4-af6d-0ca7d138b0ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.460681] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facb2d99-2a23-4388-8e99-1721eb798979 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.498503] env[62974]: DEBUG nova.compute.manager [req-836eb3c4-fe0c-4f93-84e5-deb414d3b9ec req-64a166fc-b4ea-413c-b879-75358b28ebf8 service nova] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Detach interface failed, port_id=a6e1899a-69c5-486d-bfb2-a2f12c06e8ac, reason: Instance dca952df-dac9-4502-948b-24ac6fb939f9 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1086.499640] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1086.499640] env[62974]: value = "resgroup-9" [ 1086.499640] env[62974]: _type = "ResourcePool" [ 1086.499640] env[62974]: }. 
{{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1086.499894] env[62974]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f66ffe70-5054-4692-b446-1ed7fb75de2e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.516039] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.516981] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.516981] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Cleaning up deleted instances {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1086.520335] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lease: (returnval){ [ 1086.520335] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1086.520335] env[62974]: _type = "HttpNfcLease" [ 1086.520335] env[62974]: } obtained for vApp import into resource pool (val){ [ 1086.520335] env[62974]: value = "resgroup-9" [ 1086.520335] env[62974]: _type = "ResourcePool" [ 1086.520335] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1086.520564] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the lease: (returnval){ [ 1086.520564] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1086.520564] env[62974]: _type = "HttpNfcLease" [ 1086.520564] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1086.528266] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1086.528266] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1086.528266] env[62974]: _type = "HttpNfcLease" [ 1086.528266] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1086.559809] env[62974]: DEBUG nova.network.neutron [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Successfully created port: 5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.686022] env[62974]: INFO nova.compute.resource_tracker [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating resource usage from migration 1a61954c-c534-4854-94df-272a36bdfb72 [ 1086.687555] env[62974]: DEBUG nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1086.710841] env[62974]: INFO nova.compute.manager [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Took 12.16 seconds to build instance. [ 1086.839680] env[62974]: INFO nova.compute.manager [-] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Took 1.52 seconds to deallocate network for instance. [ 1086.880033] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0709362a-9888-422a-97b7-c0afcc3b5d00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.887766] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23988710-9af4-4108-ab88-eeedc56bec2c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.919962] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a42903-a6c8-45cf-ac00-4bc122b36741 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.926994] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19bbc6b8-4257-420e-a258-302dbb42b1aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.939920] env[62974]: DEBUG nova.compute.provider_tree [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.031426] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] There are 57 instances to clean {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1087.031628] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 1aafddba-5da3-4c46-a537-3c178a1fec88] Instance has had 0 of 5 cleanup 
attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1087.040566] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1087.040566] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1087.040566] env[62974]: _type = "HttpNfcLease" [ 1087.040566] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1087.213776] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8eb11c33-7773-4ecf-aa7c-b3115ba1a9a9 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.672s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.350291] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.443271] env[62974]: DEBUG nova.scheduler.client.report [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.539139] env[62974]: DEBUG nova.network.neutron [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Successfully updated port: 30b29e6f-4b73-4bb9-9a84-6526189297a0 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1087.540398] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 8c4a973b-6476-4ac2-ac3c-b4ea2363d7e8] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1087.547366] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1087.547366] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1087.547366] env[62974]: _type = "HttpNfcLease" [ 1087.547366] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1087.704631] env[62974]: DEBUG nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1087.728443] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.728923] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.730027] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.730027] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.730027] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.730027] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.730027] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.730304] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.730587] env[62974]: DEBUG 
nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.730896] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.731202] env[62974]: DEBUG nova.virt.hardware [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.732152] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372586b1-8b83-4caa-8f7c-31262d1f7f7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.741095] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab69aac-6a97-4220-b89c-b9c9fe625a5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.949434] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.278s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.949434] env[62974]: INFO nova.compute.manager [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Migrating [ 1087.956867] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.542s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.023284] env[62974]: DEBUG nova.network.neutron [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Successfully updated port: 5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.043801] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.043915] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.044009] env[62974]: DEBUG nova.network.neutron [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1088.045180] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 713b503e-43b5-409c-8086-e6d36850f962] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1088.048403] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.048403] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1088.048403] env[62974]: _type = "HttpNfcLease" [ 1088.048403] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1088.121156] env[62974]: DEBUG nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-vif-plugged-30b29e6f-4b73-4bb9-9a84-6526189297a0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1088.121156] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.121156] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.121156] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.121655] env[62974]: DEBUG nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] No waiting events found dispatching network-vif-plugged-30b29e6f-4b73-4bb9-9a84-6526189297a0 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1088.121655] env[62974]: WARNING nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received unexpected event 
network-vif-plugged-30b29e6f-4b73-4bb9-9a84-6526189297a0 for instance with vm_state active and task_state None. [ 1088.121655] env[62974]: DEBUG nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-changed-30b29e6f-4b73-4bb9-9a84-6526189297a0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1088.121787] env[62974]: DEBUG nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing instance network info cache due to event network-changed-30b29e6f-4b73-4bb9-9a84-6526189297a0. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1088.121971] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.361475] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Volume attach. Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1088.361797] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535495', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'name': 'volume-a6000f09-20a7-4f82-af23-96b01a5803f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'serial': 'a6000f09-20a7-4f82-af23-96b01a5803f7'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1088.362943] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d6a70c-e05c-4589-88ce-3d830e6fd81c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.379277] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309e76a0-34f5-4c7a-b95f-36e2c6c6a6d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.406279] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-a6000f09-20a7-4f82-af23-96b01a5803f7/volume-a6000f09-20a7-4f82-af23-96b01a5803f7.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.406568] env[62974]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6f16b27-0cc1-4936-b51f-7b3ffdc11102 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.423890] env[62974]: DEBUG oslo_vmware.api [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1088.423890] env[62974]: value = "task-2655050" [ 1088.423890] env[62974]: _type = "Task" [ 1088.423890] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.437918] env[62974]: DEBUG oslo_vmware.api [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655050, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.465545] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.465708] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.465896] env[62974]: DEBUG nova.network.neutron [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1088.525791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.525791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.525947] env[62974]: DEBUG nova.network.neutron [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1088.545396] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.545396] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1088.545396] env[62974]: 
_type = "HttpNfcLease" [ 1088.545396] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1088.549652] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6d6331f3-327a-4f11-973e-37c1a3d9701c] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1088.589970] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "b2d46229-31a9-4be1-bd17-5411deb4944c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.590259] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.590478] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "b2d46229-31a9-4be1-bd17-5411deb4944c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.590694] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.590890] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.593317] env[62974]: INFO nova.compute.manager [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Terminating instance [ 1088.595219] env[62974]: WARNING nova.network.neutron [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] ad86c457-3431-4c60-bde9-ddba2b588dde already exists in list: networks containing: ['ad86c457-3431-4c60-bde9-ddba2b588dde']. 
ignoring it [ 1088.654768] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d62d33-31f9-4b80-8111-7617deb57458 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.665800] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e98b90-de31-43e8-b5ba-16b82b32fa10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.701682] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6650c16f-a4e2-4133-8345-701c2e0830b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.710170] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae1a2a4-fd77-43b4-8040-0129db26c9f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.724781] env[62974]: DEBUG nova.compute.provider_tree [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.934152] env[62974]: DEBUG oslo_vmware.api [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655050, 'name': ReconfigVM_Task, 'duration_secs': 0.376008} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.934453] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-a6000f09-20a7-4f82-af23-96b01a5803f7/volume-a6000f09-20a7-4f82-af23-96b01a5803f7.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.939444] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa78dcb2-4bbd-4a6d-98b2-e38af7f72b7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.950879] env[62974]: DEBUG nova.network.neutron [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "address": "fa:16:3e:b3:1d:10", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b29e6f-4b", "ovs_interfaceid": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.958552] env[62974]: DEBUG oslo_vmware.api [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1088.958552] env[62974]: value = "task-2655051" [ 1088.958552] env[62974]: _type = "Task" [ 1088.958552] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.968195] env[62974]: DEBUG oslo_vmware.api [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655051, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.044264] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1089.044264] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1089.044264] env[62974]: _type = "HttpNfcLease" [ 1089.044264] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1089.052968] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 7b0bb15c-491f-4e29-8ef5-f12a6ac02f3f] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1089.078250] env[62974]: DEBUG nova.network.neutron [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1089.100261] env[62974]: DEBUG nova.compute.manager [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.100470] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.103164] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5441225-838f-445e-bc55-e4fac82f67e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.110758] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.110998] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4c8f0f0-d0a6-4eb3-b81d-9b6d6964408c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.116972] env[62974]: DEBUG oslo_vmware.api [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1089.116972] env[62974]: value = "task-2655052" [ 1089.116972] env[62974]: _type = "Task" [ 1089.116972] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.126183] env[62974]: DEBUG oslo_vmware.api [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655052, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.228367] env[62974]: DEBUG nova.scheduler.client.report [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.286662] env[62974]: DEBUG nova.network.neutron [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Updating instance_info_cache with network_info: [{"id": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "address": "fa:16:3e:4d:cf:d4", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ea3e9de-b9", "ovs_interfaceid": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.291169] env[62974]: DEBUG nova.network.neutron [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.456037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.456037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.456037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.456037] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.456492] env[62974]: DEBUG nova.network.neutron [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing network info cache for port 30b29e6f-4b73-4bb9-9a84-6526189297a0 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.459051] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7e0173-58c9-46f1-bed5-38f2d1f1c515 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.470057] env[62974]: DEBUG oslo_vmware.api [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655051, 'name': ReconfigVM_Task, 'duration_secs': 0.136811} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.480705] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535495', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'name': 'volume-a6000f09-20a7-4f82-af23-96b01a5803f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'serial': 'a6000f09-20a7-4f82-af23-96b01a5803f7'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1089.482405] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.482616] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.482771] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.482948] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.483106] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1089.483253] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.483450] env[62974]: DEBUG nova.virt.hardware [None 
req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.483604] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.483767] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.483927] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.484110] env[62974]: DEBUG nova.virt.hardware [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.490205] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfiguring VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1089.490885] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-781457c1-51ec-4ce4-b81c-546e9591c031 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.508626] env[62974]: DEBUG oslo_vmware.api [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1089.508626] env[62974]: value = "task-2655053" [ 1089.508626] env[62974]: _type = "Task" [ 1089.508626] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.516370] env[62974]: DEBUG oslo_vmware.api [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655053, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.544639] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1089.544639] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1089.544639] env[62974]: _type = "HttpNfcLease" [ 1089.544639] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1089.556213] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 7163e48f-8344-4837-bbfd-cbb5741eee5d] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1089.627111] env[62974]: DEBUG oslo_vmware.api [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655052, 'name': PowerOffVM_Task, 'duration_secs': 0.171995} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.627111] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.627302] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.627522] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ef6971e-7c74-43ef-857b-669cdd92fb71 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.739111] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.739329] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.739561] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Deleting the datastore file [datastore2] b2d46229-31a9-4be1-bd17-5411deb4944c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.740089] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0da4b8d-df37-43cf-9c5f-12fa72c3cefd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.746733] env[62974]: DEBUG oslo_vmware.api [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for the task: (returnval){ [ 1089.746733] env[62974]: value = "task-2655055" [ 1089.746733] env[62974]: _type = "Task" [ 1089.746733] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.755737] env[62974]: DEBUG oslo_vmware.api [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655055, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.793437] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.793771] env[62974]: DEBUG nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Instance network_info: |[{"id": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "address": "fa:16:3e:4d:cf:d4", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ea3e9de-b9", "ovs_interfaceid": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1089.794273] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.795661] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:cf:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ea3e9de-b94a-4478-9c34-3ec161fff6e7', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1089.803769] env[62974]: DEBUG oslo.service.loopingcall [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 
tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1089.804296] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1089.804580] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4509c60f-164f-4b41-814e-a3a9ae8c772f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.827396] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1089.827396] env[62974]: value = "task-2655056" [ 1089.827396] env[62974]: _type = "Task" [ 1089.827396] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.835266] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655056, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.018580] env[62974]: DEBUG oslo_vmware.api [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655053, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.047225] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1090.047225] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1090.047225] env[62974]: _type = "HttpNfcLease" [ 1090.047225] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1090.047517] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1090.047517] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527413bb-dc86-cd44-63bc-ffbba6f34649" [ 1090.047517] env[62974]: _type = "HttpNfcLease" [ 1090.047517] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1090.048393] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64834a3c-d687-4c8b-9723-4ce451fb1a53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.055512] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526cfba3-1e80-1f28-8cf4-20d5d62d31fb/disk-0.vmdk from lease info. 
{{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1090.055684] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526cfba3-1e80-1f28-8cf4-20d5d62d31fb/disk-0.vmdk. {{(pid=62974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1090.060452] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: f586f9a6-1288-4aa2-9052-6e9eb74aac5f] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1090.116959] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c79afcfb-25ce-4130-96d5-5148d968e5bd] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1090.123856] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9a92dff0-1e26-49e1-b9df-df9fdd186c86 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.157437] env[62974]: DEBUG nova.compute.manager [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Received event network-changed-5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1090.157634] env[62974]: DEBUG nova.compute.manager [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Refreshing instance network info cache due to event network-changed-5ea3e9de-b94a-4478-9c34-3ec161fff6e7. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1090.157843] env[62974]: DEBUG oslo_concurrency.lockutils [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] Acquiring lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.157987] env[62974]: DEBUG oslo_concurrency.lockutils [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] Acquired lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.158174] env[62974]: DEBUG nova.network.neutron [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Refreshing network info cache for port 5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.242317] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.285s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.250371] env[62974]: DEBUG nova.network.neutron [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updated VIF entry in instance network info cache for port 30b29e6f-4b73-4bb9-9a84-6526189297a0. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.250371] env[62974]: DEBUG nova.network.neutron [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "address": "fa:16:3e:b3:1d:10", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b29e6f-4b", "ovs_interfaceid": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.250371] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.899s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.250371] env[62974]: DEBUG nova.objects.instance [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'resources' on Instance uuid 
dca952df-dac9-4502-948b-24ac6fb939f9 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.260807] env[62974]: DEBUG oslo_vmware.api [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Task: {'id': task-2655055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211913} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.261617] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.261805] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.262029] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.262233] env[62974]: INFO nova.compute.manager [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1090.262466] env[62974]: DEBUG oslo.service.loopingcall [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.262643] env[62974]: DEBUG nova.compute.manager [-] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1090.262731] env[62974]: DEBUG nova.network.neutron [-] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1090.337358] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655056, 'name': CreateVM_Task, 'duration_secs': 0.402421} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.337537] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1090.338216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.338483] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.338822] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1090.339100] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26ca81db-fd66-4f74-97fa-9e0454495667 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.344878] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1090.344878] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fc9b2c-f76f-ccfb-bdec-ee3632651f1f" [ 1090.344878] env[62974]: _type = "Task" [ 1090.344878] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.353845] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fc9b2c-f76f-ccfb-bdec-ee3632651f1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.527785] env[62974]: DEBUG nova.objects.instance [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid a44cca2f-9286-490a-9013-1fea30984fa5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.529254] env[62974]: DEBUG oslo_vmware.api [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655053, 'name': ReconfigVM_Task, 'duration_secs': 0.569908} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.529812] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.530058] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfigured VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1090.620859] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 0f19241f-1650-41e5-8fe8-828024bf6aaa] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1090.757357] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.757551] env[62974]: DEBUG nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Received event network-vif-plugged-5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1090.757752] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Acquiring lock "2a962aab-3057-43df-97f7-b63ce808fb90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.757941] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Lock "2a962aab-3057-43df-97f7-b63ce808fb90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.758112] env[62974]: DEBUG oslo_concurrency.lockutils [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] Lock "2a962aab-3057-43df-97f7-b63ce808fb90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.758272] env[62974]: DEBUG nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] No waiting events found dispatching network-vif-plugged-5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1090.758434] env[62974]: WARNING nova.compute.manager [req-9fd65763-27b8-4db3-b4db-93f75c74900a req-0c35b563-6aec-4908-ada7-060afaed3d47 
service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Received unexpected event network-vif-plugged-5ea3e9de-b94a-4478-9c34-3ec161fff6e7 for instance with vm_state building and task_state spawning. [ 1090.816824] env[62974]: INFO nova.scheduler.client.report [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted allocation for migration 9e89606c-d22f-4fbc-9789-776bf08c4296 [ 1090.866204] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fc9b2c-f76f-ccfb-bdec-ee3632651f1f, 'name': SearchDatastore_Task, 'duration_secs': 0.009163} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.868727] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.868967] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1090.869245] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.869392] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.869614] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1090.872563] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23b0fd23-831e-4774-bac2-9bfd12d54aa0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.884049] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1090.884292] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1090.889841] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be2a3ea7-f907-4ca0-b51b-6ca8cce87444 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.893807] env[62974]: DEBUG nova.network.neutron [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Updated VIF entry in instance network info cache for port 5ea3e9de-b94a-4478-9c34-3ec161fff6e7. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.894257] env[62974]: DEBUG nova.network.neutron [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Updating instance_info_cache with network_info: [{"id": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "address": "fa:16:3e:4d:cf:d4", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ea3e9de-b9", "ovs_interfaceid": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.901090] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1090.901090] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea4a7c-4432-c127-0f09-118cecc17dcc" [ 1090.901090] env[62974]: _type = "Task" [ 1090.901090] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.916530] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ea4a7c-4432-c127-0f09-118cecc17dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.009915} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.918818] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc496c0-2e3b-48d1-bd5d-e2396fc0ca94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.930135] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1090.930135] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c0bbf6-edd9-0945-ce6f-ddb50a357a36" [ 1090.930135] env[62974]: _type = "Task" [ 1090.930135] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.938448] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c0bbf6-edd9-0945-ce6f-ddb50a357a36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.989343] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2ed16b-568f-41e1-9cfb-77d0f19fb696 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.997431] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c19770-3771-4b35-b6aa-cb581a939965 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.002965] env[62974]: DEBUG nova.network.neutron [-] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.042067] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8c3b82-cec5-4dd2-adae-2d9801bd9109 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.048085] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9848eadd-fed7-4800-a0e7-53eb8ab85859 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.532s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.048085] env[62974]: DEBUG oslo_concurrency.lockutils [None req-562b5678-1061-4108-8bf9-c67b2480d1dd tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.289s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.055130] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671400d8-b20d-4fbc-ade8-4fc96d4e4461 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.075628] env[62974]: DEBUG nova.compute.provider_tree [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1091.125252] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: aa6eb55e-79c0-4e1f-8756-05dff97b06d2] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1091.281864] env[62974]: INFO nova.compute.manager [None req-5e8b2c7c-c4c9-48dc-9287-c6e4048b2762 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Get console output [ 1091.282210] env[62974]: WARNING nova.virt.vmwareapi.driver [None req-5e8b2c7c-c4c9-48dc-9287-c6e4048b2762 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] The console log is missing. Check your VSPC configuration [ 1091.318519] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46390f32-90ea-4106-8617-f4b8b64857c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.327294] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4c366c8e-79b1-4952-b22a-3a7d38338bb3 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.522s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.347077] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance '220295bf-b021-4800-bc7e-a3dd311c747a' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1091.398611] env[62974]: DEBUG oslo_concurrency.lockutils [req-9093991b-fd3d-4ef2-85f8-b7c153e87015 req-0378affa-8ea3-42ba-851e-30e5ce76fc73 service nova] Releasing lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.417525] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Completed reading data from the image iterator. 
{{(pid=62974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1091.417525] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526cfba3-1e80-1f28-8cf4-20d5d62d31fb/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1091.417525] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec68496-805f-4bf3-9078-174915ca01a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.424941] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526cfba3-1e80-1f28-8cf4-20d5d62d31fb/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1091.425132] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526cfba3-1e80-1f28-8cf4-20d5d62d31fb/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1091.425630] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-682da111-8206-47c6-a051-729eb0c88a89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.438587] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c0bbf6-edd9-0945-ce6f-ddb50a357a36, 'name': SearchDatastore_Task, 'duration_secs': 0.016138} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.438809] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.439063] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 2a962aab-3057-43df-97f7-b63ce808fb90/2a962aab-3057-43df-97f7-b63ce808fb90.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1091.439308] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72a2b269-08b7-405a-9120-aaab12f50f09 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.445654] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1091.445654] env[62974]: value = "task-2655057" [ 1091.445654] env[62974]: _type = "Task" [ 1091.445654] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.453062] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655057, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.458774] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.459038] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.506191] env[62974]: INFO nova.compute.manager [-] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Took 1.24 seconds to deallocate network for instance. 
[ 1091.602963] env[62974]: ERROR nova.scheduler.client.report [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [req-0744b61d-3ad1-4d9f-80b5-81b45b043f5e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0744b61d-3ad1-4d9f-80b5-81b45b043f5e"}]} [ 1091.619726] env[62974]: DEBUG nova.scheduler.client.report [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1091.629490] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 92c80524-0fb6-4f28-9a72-bc4ab5793558] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1091.635624] env[62974]: DEBUG nova.scheduler.client.report [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1091.635844] env[62974]: DEBUG nova.compute.provider_tree [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1091.647954] env[62974]: DEBUG nova.scheduler.client.report [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 
1091.665210] env[62974]: DEBUG oslo_vmware.rw_handles [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526cfba3-1e80-1f28-8cf4-20d5d62d31fb/disk-0.vmdk. {{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1091.665210] env[62974]: INFO nova.virt.vmwareapi.images [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Downloaded image file data ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a [ 1091.665919] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5011e47-b411-4b35-a98d-e93fa2ce8288 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.672117] env[62974]: DEBUG nova.scheduler.client.report [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1091.687778] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5907bd84-8ca9-4cbf-bdd7-49744e435b58 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.720133] env[62974]: INFO nova.virt.vmwareapi.images [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] The imported VM was unregistered [ 1091.723957] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Caching image {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1091.723957] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Creating directory with path [datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.723957] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8072aa79-9d5f-4fbe-b0a7-3edee9af6dd7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.743121] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Created directory with path [datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.743377] 
env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6/OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6.vmdk to [datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a.vmdk. {{(pid=62974) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1091.746175] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8bd6fe9a-be65-4fc9-96fc-b26282993939 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.753565] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1091.753565] env[62974]: value = "task-2655059" [ 1091.753565] env[62974]: _type = "Task" [ 1091.753565] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.765357] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655059, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.860239] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.860728] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aeb2aedb-2216-4eb2-8a4d-9c362dc307eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.867699] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1091.867699] env[62974]: value = "task-2655060" [ 1091.867699] env[62974]: _type = "Task" [ 1091.867699] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.891102] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655060, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.925206] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43214fb0-a0a2-4616-afab-1b1e68e045b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.933486] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744093c3-70cb-4122-ae1b-c9f0e37c2b0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.966126] env[62974]: INFO nova.compute.manager [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Detaching volume 9ec1224e-9aca-4ae8-9379-33e489d62a82 [ 1091.972288] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d082f117-ebd9-4b8d-878b-f17d78c9da1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.985474] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655057, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.986896] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bec485b-99c3-4e13-88ee-ffbbb235c567 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.002602] env[62974]: DEBUG nova.compute.provider_tree [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.006994] env[62974]: INFO nova.virt.block_device [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Attempting to driver detach volume 9ec1224e-9aca-4ae8-9379-33e489d62a82 from mountpoint /dev/sdb [ 1092.007243] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Volume detach. 
Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1092.007454] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535490', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'name': 'volume-9ec1224e-9aca-4ae8-9379-33e489d62a82', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'serial': '9ec1224e-9aca-4ae8-9379-33e489d62a82'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1092.008300] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbc338c-9952-4d35-912f-12e93c85b101 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.012580] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.036355] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16990b7-7d2e-41f2-a907-64dab6372eda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.043351] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6764e286-8428-4d5f-a4e8-0358bc2a54ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.068224] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa941a0-5932-4e71-b815-4ebeed5228a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.085514] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] The volume has not been displaced from its original location: [datastore2] volume-9ec1224e-9aca-4ae8-9379-33e489d62a82/volume-9ec1224e-9aca-4ae8-9379-33e489d62a82.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1092.091562] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1092.092083] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4066f76-c289-47a9-9db2-053931d1b2a7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.116228] env[62974]: DEBUG oslo_vmware.api [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1092.116228] env[62974]: value = "task-2655061" [ 1092.116228] env[62974]: _type = "Task" [ 1092.116228] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.126088] env[62974]: DEBUG oslo_vmware.api [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655061, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.132882] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 18489c02-5958-431f-aede-f554d0d785ed] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1092.263582] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655059, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.333989] env[62974]: DEBUG nova.compute.manager [req-e568109f-6c65-491b-9241-bf4fa1817caf req-59c62e56-e854-4970-99ed-84e51acde57e service nova] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Received event network-vif-deleted-9d3a88c4-e165-4874-8774-791991bbb3bb {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1092.377579] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655060, 'name': PowerOffVM_Task, 'duration_secs': 0.224051} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.377853] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.378057] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance '220295bf-b021-4800-bc7e-a3dd311c747a' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1092.475017] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558558} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.475310] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 2a962aab-3057-43df-97f7-b63ce808fb90/2a962aab-3057-43df-97f7-b63ce808fb90.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1092.475525] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1092.475788] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13889817-8a80-4535-8fb3-9b2de06a9509 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.482892] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1092.482892] env[62974]: value = "task-2655062" [ 1092.482892] env[62974]: _type = "Task" [ 1092.482892] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.491473] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655062, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.538752] env[62974]: DEBUG nova.scheduler.client.report [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 147 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1092.539105] env[62974]: DEBUG nova.compute.provider_tree [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 147 to 148 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1092.539299] env[62974]: DEBUG nova.compute.provider_tree [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.628043] env[62974]: DEBUG oslo_vmware.api [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.636676] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: ef54d01a-5d2c-448a-a060-37520de396ca] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1092.763564] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655059, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.885524] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.885765] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.885891] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.886187] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.886364] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.886514] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.886721] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.886878] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.887101] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 
tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.887338] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.887535] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.894395] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1ef4523-48d0-4c82-8be0-326e0105cb40 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.916645] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1092.916645] env[62974]: value = "task-2655063" [ 1092.916645] env[62974]: _type = "Task" [ 1092.916645] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.925302] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655063, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.992878] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07969} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.993203] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1092.994041] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe654beb-672a-43f7-9b18-f51b016a5e78 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.016968] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 2a962aab-3057-43df-97f7-b63ce808fb90/2a962aab-3057-43df-97f7-b63ce808fb90.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.017333] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d476e9ec-beb1-457f-84cc-391182a36e7c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.039172] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1093.039172] env[62974]: value = "task-2655064" [ 1093.039172] env[62974]: _type = "Task" [ 1093.039172] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.044946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.796s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.051267] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.039s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.051600] env[62974]: DEBUG nova.objects.instance [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lazy-loading 'resources' on Instance uuid b2d46229-31a9-4be1-bd17-5411deb4944c {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.052989] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655064, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.134114] env[62974]: DEBUG oslo_vmware.api [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655061, 'name': ReconfigVM_Task, 'duration_secs': 0.643396} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.134617] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1093.143550] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b0ca30eb-b12a-49bc-9af3-5de1ad1b7fa4] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1093.146577] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc51c292-c60b-4288-aaee-4cb60dbf5b65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.173846] env[62974]: DEBUG oslo_vmware.api [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1093.173846] env[62974]: value = "task-2655065" [ 1093.173846] env[62974]: _type = "Task" [ 1093.173846] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.175602] env[62974]: INFO nova.scheduler.client.report [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted allocations for instance dca952df-dac9-4502-948b-24ac6fb939f9 [ 1093.202883] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-ed04cd61-079c-4a14-8e69-0046a9247550" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.203344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-ed04cd61-079c-4a14-8e69-0046a9247550" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.203898] env[62974]: DEBUG nova.objects.instance [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'flavor' on Instance uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.206034] env[62974]: DEBUG oslo_vmware.api [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655065, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.266345] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655059, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.427737] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655063, 'name': ReconfigVM_Task, 'duration_secs': 0.291729} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.427881] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance '220295bf-b021-4800-bc7e-a3dd311c747a' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1093.550378] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655064, 'name': ReconfigVM_Task, 'duration_secs': 0.41647} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.550760] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 2a962aab-3057-43df-97f7-b63ce808fb90/2a962aab-3057-43df-97f7-b63ce808fb90.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1093.551508] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb2ff475-7958-47cd-a86a-5778a5592f8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.558022] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1093.558022] env[62974]: value = "task-2655066" [ 1093.558022] env[62974]: _type = "Task" [ 1093.558022] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.566513] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655066, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.669274] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cc7c25b5-1463-4eab-8d8f-f812d4f16c34] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1093.691800] env[62974]: DEBUG oslo_vmware.api [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655065, 'name': ReconfigVM_Task, 'duration_secs': 0.187431} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.692159] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535490', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'name': 'volume-9ec1224e-9aca-4ae8-9379-33e489d62a82', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': '9ec1224e-9aca-4ae8-9379-33e489d62a82', 'serial': '9ec1224e-9aca-4ae8-9379-33e489d62a82'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1093.695520] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ff7e2e3e-913a-4fa9-a71f-03100616b155 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "dca952df-dac9-4502-948b-24ac6fb939f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.731s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.740014] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78499d3c-2345-4f24-9511-e9e0e099311e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.749230] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4f52bc-31cf-48dd-af3a-a186284ebd89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.791079] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93220e0-26e3-4245-aa09-17e5194c8acb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.799833] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655059, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.803191] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8a7695-cb30-458e-8f64-2444a94b4695 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.817505] env[62974]: DEBUG nova.compute.provider_tree [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.828202] env[62974]: DEBUG nova.objects.instance [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'pci_requests' on Instance uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.935662] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1093.935944] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1093.936158] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1093.936308] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1093.936452] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1093.936601] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1093.936828] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1093.937020] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1093.937197] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1093.937358] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1093.937528] env[62974]: DEBUG nova.virt.hardware [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1093.944041] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1093.944388] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3701d786-3cde-410a-85cc-5d3f40aa6940 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.964308] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1093.964308] env[62974]: value = "task-2655067" [ 1093.964308] env[62974]: _type = "Task" [ 1093.964308] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.976522] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655067, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.069389] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655066, 'name': Rename_Task, 'duration_secs': 0.175887} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.069389] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1094.069627] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f1f0a04-1396-42f5-91e9-98127441aa4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.077341] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1094.077341] env[62974]: value = "task-2655068" [ 1094.077341] env[62974]: _type = "Task" [ 1094.077341] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.086333] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655068, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.172796] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 32b17ff4-f7e1-498d-aef7-162f81cd5feb] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1094.245118] env[62974]: DEBUG nova.objects.instance [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid a44cca2f-9286-490a-9013-1fea30984fa5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.268515] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655059, 'name': MoveVirtualDisk_Task} progress is 97%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.320532] env[62974]: DEBUG nova.scheduler.client.report [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.331360] env[62974]: DEBUG nova.objects.base [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Object Instance<72b0b643-7747-4dae-9d85-c8c6a573ce07> lazy-loaded attributes: flavor,pci_requests {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1094.331572] env[62974]: DEBUG nova.network.neutron [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1094.394888] env[62974]: DEBUG nova.policy [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1094.474941] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655067, 'name': ReconfigVM_Task, 'duration_secs': 0.405387} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.476301] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1094.476301] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec04ea3a-868e-4bb1-9f84-993ab632b879 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.503332] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1094.503617] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae5554dd-6060-4296-8e6e-934697c503ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.522201] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1094.522201] env[62974]: value = "task-2655069" [ 1094.522201] env[62974]: _type = "Task" [ 1094.522201] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.530381] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655069, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.586341] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655068, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.677130] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 226f3328-e3b1-4ae1-8b7c-349b552cf5a2] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1094.767922] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655059, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.680982} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.768195] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6/OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6.vmdk to [datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a.vmdk. [ 1094.768375] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Cleaning up location [datastore2] OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1094.768534] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_5f3a20ca-6f72-4f2e-b42f-a98cf47f92f6 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.769025] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-764c4591-7c9a-4de1-adac-cf890f341ccc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.775405] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1094.775405] env[62974]: value = "task-2655070" [ 1094.775405] env[62974]: _type = "Task" [ 1094.775405] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.782977] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655070, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.826188] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.775s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.843227] env[62974]: INFO nova.scheduler.client.report [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Deleted allocations for instance b2d46229-31a9-4be1-bd17-5411deb4944c [ 1095.032115] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655069, 'name': ReconfigVM_Task, 'duration_secs': 0.282362} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.032397] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.032685] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance '220295bf-b021-4800-bc7e-a3dd311c747a' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1095.087381] env[62974]: DEBUG oslo_vmware.api [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655068, 'name': PowerOnVM_Task, 'duration_secs': 0.645518} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.087665] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1095.087874] env[62974]: INFO nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Took 7.38 seconds to spawn the instance on the hypervisor. 
[ 1095.088066] env[62974]: DEBUG nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1095.088881] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa6cd81-f484-4655-bae3-1e74dbb32140 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.180254] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: eb8647c7-f5e1-4de5-8321-9a9ecff5961c] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1095.254597] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1a32757b-1cf5-4a56-bd38-aaef213ceb01 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.795s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.259706] env[62974]: DEBUG nova.compute.manager [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1095.285704] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655070, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154169} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.285965] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1095.286154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.286398] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a.vmdk to [datastore2] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1095.286646] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de107097-7be6-432e-8737-a393bae6cdfd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.292675] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1095.292675] env[62974]: value = "task-2655071" [ 1095.292675] env[62974]: _type = "Task" [ 1095.292675] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.302345] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655071, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.354148] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2656d89b-a26a-4984-bb14-88db7f0fb4d1 tempest-ServerTagsTestJSON-2035732421 tempest-ServerTagsTestJSON-2035732421-project-member] Lock "b2d46229-31a9-4be1-bd17-5411deb4944c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.764s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.540032] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2067d58c-2145-400d-af18-8d69c12a5c13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.564187] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bbd5c5-91c9-47f4-9ab7-a27d9ce27b60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.585399] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance '220295bf-b021-4800-bc7e-a3dd311c747a' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1095.607729] env[62974]: INFO nova.compute.manager [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Took 13.82 seconds to build instance. [ 1095.683532] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c08ed924-9b7d-4773-8e49-c57ecfb27d03] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1095.780491] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.780965] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.803157] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655071, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.856817] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.857118] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.111898] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a22545e1-a7dc-4cd8-a876-d0f975b78815 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "2a962aab-3057-43df-97f7-b63ce808fb90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.330s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.187397] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: e11408df-466c-4101-b0cc-3621cda78a45] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1096.286478] env[62974]: INFO nova.compute.claims [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1096.306510] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655071, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.360666] env[62974]: INFO nova.compute.manager [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Detaching volume a6000f09-20a7-4f82-af23-96b01a5803f7 [ 1096.398372] env[62974]: INFO nova.virt.block_device [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Attempting to driver detach volume a6000f09-20a7-4f82-af23-96b01a5803f7 from mountpoint /dev/sdc [ 1096.398554] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Volume detach. 
Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1096.398716] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535495', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'name': 'volume-a6000f09-20a7-4f82-af23-96b01a5803f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'serial': 'a6000f09-20a7-4f82-af23-96b01a5803f7'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1096.399650] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b19b5de-58cd-45b3-8c64-b8b4812dfcb1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.408466] env[62974]: DEBUG nova.compute.manager [req-3f9ad3b2-d587-4444-90e5-641f7c7d9514 req-0e206935-9be6-4536-8079-4f6fe4672a25 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-vif-plugged-ed04cd61-079c-4a14-8e69-0046a9247550 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1096.408466] env[62974]: DEBUG oslo_concurrency.lockutils [req-3f9ad3b2-d587-4444-90e5-641f7c7d9514 req-0e206935-9be6-4536-8079-4f6fe4672a25 service nova] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.408466] env[62974]: DEBUG oslo_concurrency.lockutils [req-3f9ad3b2-d587-4444-90e5-641f7c7d9514 req-0e206935-9be6-4536-8079-4f6fe4672a25 service nova] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.408466] env[62974]: DEBUG oslo_concurrency.lockutils [req-3f9ad3b2-d587-4444-90e5-641f7c7d9514 req-0e206935-9be6-4536-8079-4f6fe4672a25 service nova] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.408466] env[62974]: DEBUG nova.compute.manager [req-3f9ad3b2-d587-4444-90e5-641f7c7d9514 req-0e206935-9be6-4536-8079-4f6fe4672a25 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] No waiting events found dispatching network-vif-plugged-ed04cd61-079c-4a14-8e69-0046a9247550 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1096.408466] env[62974]: WARNING nova.compute.manager [req-3f9ad3b2-d587-4444-90e5-641f7c7d9514 req-0e206935-9be6-4536-8079-4f6fe4672a25 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received unexpected event network-vif-plugged-ed04cd61-079c-4a14-8e69-0046a9247550 for instance with vm_state active and task_state None. 
[ 1096.431921] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166fa17c-99fa-44d6-bcca-aea6246caaa8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.441913] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca14c470-63a2-4656-91a7-35a387615d19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.471688] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5a0b05-d1ce-4b7b-ac17-1e0c1adf56b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.489659] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] The volume has not been displaced from its original location: [datastore2] volume-a6000f09-20a7-4f82-af23-96b01a5803f7/volume-a6000f09-20a7-4f82-af23-96b01a5803f7.vmdk. No consolidation needed. {{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1096.495112] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfiguring VM instance instance-00000065 to detach disk 2002 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1096.495556] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8e224d7-cb5d-4494-a281-1db5eb4a347f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.515688] env[62974]: DEBUG oslo_vmware.api [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1096.515688] env[62974]: value = "task-2655072" [ 1096.515688] env[62974]: _type = "Task" [ 1096.515688] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.524546] env[62974]: DEBUG oslo_vmware.api [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655072, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.673193] env[62974]: DEBUG nova.network.neutron [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Successfully updated port: ed04cd61-079c-4a14-8e69-0046a9247550 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.691455] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 79448002-daa3-4afd-bd1b-36d734642a9e] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1096.798066] env[62974]: INFO nova.compute.resource_tracker [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating resource usage from migration b47d67b1-b862-4c01-9dc5-efc0452e1e77 [ 1096.813680] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655071, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.991974] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cb538e-4171-459e-9857-6c24ff8c19ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.000039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e70948-7261-45fa-899e-8d2c0c54673e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.040279] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1623954c-83e2-40a1-b9a8-894dd59f2d9c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.049536] env[62974]: DEBUG oslo_vmware.api [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655072, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.052870] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ce8d6e-f6da-4f70-bb55-c07b9761fa77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.068525] env[62974]: DEBUG nova.compute.provider_tree [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1097.176221] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.176678] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.176861] env[62974]: DEBUG nova.network.neutron [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.194503] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 70adaccf-44ab-44b1-ac8a-005d42c09f0a] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1097.284220] env[62974]: DEBUG nova.network.neutron [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Port daa4e7d6-34e5-4455-b28f-6ee056ef2e93 binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1097.314239] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655071, 'name': CopyVirtualDisk_Task} progress is 74%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.544443] env[62974]: DEBUG oslo_vmware.api [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655072, 'name': ReconfigVM_Task, 'duration_secs': 0.568047} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.544751] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Reconfigured VM instance instance-00000065 to detach disk 2002 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1097.551106] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f018d55-6bd4-4f4a-8bd7-8af804ae5866 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.569080] env[62974]: DEBUG oslo_vmware.api [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1097.569080] env[62974]: value = "task-2655073" [ 1097.569080] env[62974]: _type = "Task" [ 1097.569080] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.582992] env[62974]: DEBUG oslo_vmware.api [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655073, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.609022] env[62974]: ERROR nova.scheduler.client.report [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [req-cf24ac8f-3e9c-46eb-b5fd-cb03ac4f845b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cf24ac8f-3e9c-46eb-b5fd-cb03ac4f845b"}]} [ 1097.630060] env[62974]: DEBUG nova.scheduler.client.report [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1097.648482] env[62974]: DEBUG nova.scheduler.client.report [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1097.648817] env[62974]: DEBUG nova.compute.provider_tree [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1097.667365] env[62974]: DEBUG nova.scheduler.client.report [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1097.698717] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 3df97cea-5a6e-4d7a-b2f3-e02213816e24] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1097.742765] env[62974]: WARNING nova.network.neutron [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] ad86c457-3431-4c60-bde9-ddba2b588dde already exists in list: networks containing: ['ad86c457-3431-4c60-bde9-ddba2b588dde']. 
ignoring it [ 1097.742765] env[62974]: WARNING nova.network.neutron [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] ad86c457-3431-4c60-bde9-ddba2b588dde already exists in list: networks containing: ['ad86c457-3431-4c60-bde9-ddba2b588dde']. ignoring it [ 1097.795667] env[62974]: DEBUG nova.scheduler.client.report [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1097.813667] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655071, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.016988] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a77697c-92a1-4ab0-a7fb-f0898912c08a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.024691] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126f6381-6850-4955-9f7a-13ebb61af152 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.063545] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31349a2c-d1ad-44e8-90f9-cefd7b9f7203 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.076678] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda714a9-d6aa-4399-a9ce-be4ae5e93551 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.088572] env[62974]: DEBUG oslo_vmware.api [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655073, 'name': ReconfigVM_Task, 'duration_secs': 0.226668} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.098072] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535495', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'name': 'volume-a6000f09-20a7-4f82-af23-96b01a5803f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a44cca2f-9286-490a-9013-1fea30984fa5', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6000f09-20a7-4f82-af23-96b01a5803f7', 'serial': 'a6000f09-20a7-4f82-af23-96b01a5803f7'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1098.100882] env[62974]: DEBUG nova.compute.provider_tree [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1098.202664] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c1d0b90c-aa1c-485d-850d-a1495feac7c9] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1098.303524] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.303889] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.304159] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.325855] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 
tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655071, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.727459} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.326361] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a/ed588ead-2e1c-4f01-9dba-9ca0a9c7a60a.vmdk to [datastore2] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1098.327152] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0a1de6-321c-4fc4-9c14-4b6e15d9fb3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.349862] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.351116] env[62974]: DEBUG nova.network.neutron [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "address": "fa:16:3e:b3:1d:10", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b29e6f-4b", "ovs_interfaceid": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ed04cd61-079c-4a14-8e69-0046a9247550", "address": "fa:16:3e:5b:0c:dc", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped04cd61-07", "ovs_interfaceid": "ed04cd61-079c-4a14-8e69-0046a9247550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.352654] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1d8831f-ba07-401f-8f8d-5d5edc1ecbd6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.369060] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.369634] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.369790] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.371398] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7634b35-355d-45e8-ad9a-52781a81e25b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.389863] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.390107] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.390262] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.390440] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.390586] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.390757] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.390967] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.391154] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.391317] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 
tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.391475] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.391645] env[62974]: DEBUG nova.virt.hardware [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.398120] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfiguring VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1098.400201] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3aaae1ec-87d9-4ff3-84ab-6f14317faa76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.412092] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1098.412092] env[62974]: value = "task-2655074" [ 1098.412092] env[62974]: _type = "Task" [ 1098.412092] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.418089] env[62974]: DEBUG oslo_vmware.api [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1098.418089] env[62974]: value = "task-2655075" [ 1098.418089] env[62974]: _type = "Task" [ 1098.418089] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.421305] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.428894] env[62974]: DEBUG oslo_vmware.api [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655075, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.447635] env[62974]: DEBUG nova.compute.manager [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-changed-ed04cd61-079c-4a14-8e69-0046a9247550 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1098.447826] env[62974]: DEBUG nova.compute.manager [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing instance network info cache due to event network-changed-ed04cd61-079c-4a14-8e69-0046a9247550. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1098.448040] env[62974]: DEBUG oslo_concurrency.lockutils [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.448181] env[62974]: DEBUG oslo_concurrency.lockutils [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.448334] env[62974]: DEBUG nova.network.neutron [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Refreshing network info cache for port ed04cd61-079c-4a14-8e69-0046a9247550 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.641856] env[62974]: DEBUG nova.scheduler.client.report [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1098.642155] env[62974]: DEBUG nova.compute.provider_tree [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 150 to 151 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1098.642344] env[62974]: DEBUG nova.compute.provider_tree [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1098.653367] env[62974]: DEBUG nova.objects.instance [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'flavor' on Instance uuid a44cca2f-9286-490a-9013-1fea30984fa5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.706253] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6243cce3-8611-46fa-8379-e2f3c825c4dd] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1098.922382] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655074, 'name': ReconfigVM_Task, 'duration_secs': 0.346969} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.925232] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Reconfigured VM instance instance-00000060 to attach disk [datastore2] c90c9a6d-661f-4574-8a0d-7d8cacf8618d/c90c9a6d-661f-4574-8a0d-7d8cacf8618d.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.925848] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba402a30-a7bb-4f18-bd3a-d8ad235bd80d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.932604] env[62974]: DEBUG oslo_vmware.api [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.933727] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1098.933727] env[62974]: value = "task-2655076" [ 1098.933727] env[62974]: _type = "Task" [ 1098.933727] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.940939] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655076, 'name': Rename_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.146062] env[62974]: DEBUG nova.network.neutron [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updated VIF entry in instance network info cache for port ed04cd61-079c-4a14-8e69-0046a9247550. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.146554] env[62974]: DEBUG nova.network.neutron [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "address": "fa:16:3e:b3:1d:10", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b29e6f-4b", "ovs_interfaceid": "30b29e6f-4b73-4bb9-9a84-6526189297a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ed04cd61-079c-4a14-8e69-0046a9247550", "address": "fa:16:3e:5b:0c:dc", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped04cd61-07", "ovs_interfaceid": "ed04cd61-079c-4a14-8e69-0046a9247550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.148208] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.367s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.148385] env[62974]: INFO nova.compute.manager [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Migrating [ 1099.208740] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: e23dbff7-d23e-4909-9b33-67ed15c325e7] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1099.360282] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.360477] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.360653] env[62974]: DEBUG nova.network.neutron [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.431922] env[62974]: DEBUG oslo_vmware.api [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655075, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.442000] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655076, 'name': Rename_Task, 'duration_secs': 0.150338} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.442276] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1099.442505] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c62b7b10-8a10-41cc-b99a-7e5b68f4344a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.448084] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1099.448084] env[62974]: value = "task-2655077" [ 1099.448084] env[62974]: _type = "Task" [ 1099.448084] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.455018] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.655601] env[62974]: DEBUG oslo_concurrency.lockutils [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.655994] env[62974]: DEBUG nova.compute.manager [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Received event network-changed-5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1099.656257] env[62974]: DEBUG nova.compute.manager [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Refreshing instance network info cache due to event network-changed-5ea3e9de-b94a-4478-9c34-3ec161fff6e7. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1099.656530] env[62974]: DEBUG oslo_concurrency.lockutils [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] Acquiring lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.656717] env[62974]: DEBUG oslo_concurrency.lockutils [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] Acquired lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.656911] env[62974]: DEBUG nova.network.neutron [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Refreshing network info cache for port 5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1099.664190] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.665022] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.665022] env[62974]: DEBUG nova.network.neutron [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.665889] env[62974]: DEBUG oslo_concurrency.lockutils [None req-18a107f1-3a35-42b4-bab6-2bf448dea572 tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.809s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.712063] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: e42547b0-25b7-4a34-b832-b93103065928] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1099.936428] env[62974]: DEBUG oslo_vmware.api [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655075, 'name': ReconfigVM_Task, 'duration_secs': 1.401205} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.937054] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.937294] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfigured VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1099.960218] env[62974]: DEBUG oslo_vmware.api [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655077, 'name': PowerOnVM_Task, 'duration_secs': 0.472133} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.962394] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1100.070072] env[62974]: DEBUG nova.compute.manager [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1100.071170] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dbd78d-5d36-406f-ac88-9a3894e935f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.089597] env[62974]: DEBUG nova.network.neutron [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.215040] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 55229db9-9442-4973-a1f2-7762227167a4] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1100.443171] env[62974]: DEBUG oslo_concurrency.lockutils [None req-40dbac2d-4bd8-4b70-a185-2fc4f5150846 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-ed04cd61-079c-4a14-8e69-0046a9247550" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.240s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.496600] env[62974]: DEBUG nova.network.neutron [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Updated VIF entry in instance network info cache for port 5ea3e9de-b94a-4478-9c34-3ec161fff6e7. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1100.496984] env[62974]: DEBUG nova.network.neutron [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Updating instance_info_cache with network_info: [{"id": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "address": "fa:16:3e:4d:cf:d4", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ea3e9de-b9", "ovs_interfaceid": "5ea3e9de-b94a-4478-9c34-3ec161fff6e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.499097] env[62974]: DEBUG nova.network.neutron [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": 
"ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.580322] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.580605] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.580912] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "a44cca2f-9286-490a-9013-1fea30984fa5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.581162] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.581364] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1100.583570] env[62974]: INFO nova.compute.manager [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Terminating instance [ 1100.590774] env[62974]: DEBUG oslo_concurrency.lockutils [None req-962fa3e1-f2b7-48e1-ba70-8431ab7e333a tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 27.006s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.591761] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.719498] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 14523914-68ab-4d39-8eb8-6a786ddcb4dc] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1100.999701] env[62974]: DEBUG oslo_concurrency.lockutils [req-df7a0320-1adf-4732-9e89-6bdf678122b5 req-947cf4c2-bb17-4a9c-a52f-86cd513fae60 service nova] Releasing lock "refresh_cache-2a962aab-3057-43df-97f7-b63ce808fb90" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.000748] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.091085] env[62974]: DEBUG nova.compute.manager [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1101.091357] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1101.092329] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f5edf2-f602-40a2-943f-59abf3976980 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.100235] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee2b0e0-dbf6-4a18-aaf6-5c7e7f005803 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.104898] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1101.105324] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9419ded-24c2-4423-8ae6-3bea2bc25815 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.108727] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94911106-7bf1-4dd8-8037-59dd9779fa76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.113854] env[62974]: DEBUG oslo_vmware.api [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1101.113854] env[62974]: value = "task-2655078" [ 1101.113854] env[62974]: _type = "Task" [ 1101.113854] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.130201] env[62974]: DEBUG oslo_vmware.api [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655078, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.222583] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6e8f07c2-60da-4bad-a7af-8c83294e232f] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1101.624221] env[62974]: DEBUG oslo_vmware.api [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655078, 'name': PowerOffVM_Task, 'duration_secs': 0.218816} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.624490] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1101.624649] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1101.624879] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7cd2eea-b8dd-4e1c-aebe-62e4482f6ca5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.695419] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1101.695641] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1101.695820] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Deleting the datastore file [datastore1] a44cca2f-9286-490a-9013-1fea30984fa5 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.696094] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0be64259-8891-40b5-ba94-776f014484f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.704369] env[62974]: DEBUG oslo_vmware.api [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for the task: (returnval){ [ 1101.704369] env[62974]: value = "task-2655080" [ 1101.704369] env[62974]: _type = "Task" [ 1101.704369] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.712560] env[62974]: DEBUG oslo_vmware.api [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
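The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow the same two-step shape: invoke a vSphere *_Task method, then poll the returned Task object until vCenter reports success (the "progress is 0%" lines are that polling). A hedged sketch of the call pattern using oslo.vmware's public session API; the session construction and the vm_ref lookup are assumptions, not values from this log:

    from oslo_vmware import api

    def power_off(session, vm_ref):
        # invoke_api() sends the SOAP request and returns a Task managed-object
        # reference immediately; wait_for_task() then polls that task at the
        # session's task_poll_interval until it reaches 'success' or raises.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)

    # A session is normally created once per driver, roughly like this
    # (host and credentials below are placeholders):
    # session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)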
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.727344] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6c7401b6-a69f-4de3-aeb9-26c727d57b76] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1102.066769] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-30b29e6f-4b73-4bb9-9a84-6526189297a0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.067078] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-30b29e6f-4b73-4bb9-9a84-6526189297a0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.214049] env[62974]: DEBUG oslo_vmware.api [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Task: {'id': task-2655080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126982} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.214049] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1102.214318] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1102.214414] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1102.214518] env[62974]: INFO nova.compute.manager [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1102.215422] env[62974]: DEBUG oslo.service.loopingcall [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
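The "Waiting for function ... _deallocate_network_with_retries to return" entry points at oslo.service's loopingcall module: the Neutron deallocation is driven by a looping call that keeps retrying until the wrapped function signals completion. The exact variant and retry policy are not visible in this log, so the following is only a generic sketch of that mechanism; deallocate() and TransientNeutronError are illustrative stand-ins, not real Nova or Neutron names:

    from oslo_service import loopingcall

    class TransientNeutronError(Exception):
        pass

    def deallocate():
        # stand-in for the real "unbind and delete ports" call
        pass

    def _deallocate_with_retries():
        try:
            deallocate()
        except TransientNeutronError:
            return  # swallow the error; the loop calls us again next interval
        # Raising LoopingCallDone stops the loop and becomes the wait() result.
        raise loopingcall.LoopingCallDone(True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1.0).wait()  # blocks until LoopingCallDone is raised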
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1102.215422] env[62974]: DEBUG nova.compute.manager [-] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1102.215422] env[62974]: DEBUG nova.network.neutron [-] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1102.221298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651060ef-5236-46c1-9a6e-1633c9bce8ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.243756] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 0bc05477-1802-4f8b-8d23-2742f9baf603] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1102.246253] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27e99c2-3f25-471f-b714-5c23b64af663 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.265085] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance '220295bf-b021-4800-bc7e-a3dd311c747a' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1102.516886] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24151cee-0f2a-4777-af3b-8d593b857bf8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.538720] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance '514e0f15-f27d-4fab-9107-b92884075420' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1102.570344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.570554] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.574025] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f438fd1-ca3d-4fe0-b25b-f2644ad87820 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.593141] env[62974]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3a3930-dd22-4323-b6cc-3c5f36ce86af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.622044] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfiguring VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1102.622728] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65e4d3ca-44cd-4cf3-ac6e-775acf05e07b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.641403] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1102.641403] env[62974]: value = "task-2655081" [ 1102.641403] env[62974]: _type = "Task" [ 1102.641403] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.649739] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.750262] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: eb7dcbbb-1d90-44e2-bd50-6d9bd8f7bf26] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1102.774396] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1102.774396] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69063fe2-e5d2-4f0e-8eb6-004d06d0d03d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.783994] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1102.783994] env[62974]: value = "task-2655082" [ 1102.783994] env[62974]: _type = "Task" [ 1102.783994] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.795249] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655082, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.956243] env[62974]: DEBUG nova.compute.manager [req-67db5235-41f6-467f-81f3-e4971189c1fc req-ffbcc233-29d4-4302-9c76-1160cc998e65 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Received event network-vif-deleted-3ee36563-83e1-498a-a5a3-81a8ff2ee417 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1102.956543] env[62974]: INFO nova.compute.manager [req-67db5235-41f6-467f-81f3-e4971189c1fc req-ffbcc233-29d4-4302-9c76-1160cc998e65 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Neutron deleted interface 3ee36563-83e1-498a-a5a3-81a8ff2ee417; detaching it from the instance and deleting it from the info cache [ 1102.956690] env[62974]: DEBUG nova.network.neutron [req-67db5235-41f6-467f-81f3-e4971189c1fc req-ffbcc233-29d4-4302-9c76-1160cc998e65 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.046416] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.046852] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0cfb5fa-504b-4fc4-89ff-6249c49d150d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.054191] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1103.054191] env[62974]: value = "task-2655083" [ 1103.054191] env[62974]: _type = "Task" [ 1103.054191] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.062917] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.151410] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.254376] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b31dea29-79d6-4117-bdb5-2d38fb660a53] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1103.293948] env[62974]: DEBUG oslo_vmware.api [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655082, 'name': PowerOnVM_Task, 'duration_secs': 0.397182} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.294241] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.294418] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f820068d-bc3a-40e1-bd87-17090a424661 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance '220295bf-b021-4800-bc7e-a3dd311c747a' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1103.436963] env[62974]: DEBUG nova.network.neutron [-] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.459420] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6560f1d-949d-424d-8d35-887af8dd3ea9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.469048] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df62169-a741-4465-9168-8e9eb94130ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.501674] env[62974]: DEBUG nova.compute.manager [req-67db5235-41f6-467f-81f3-e4971189c1fc req-ffbcc233-29d4-4302-9c76-1160cc998e65 service nova] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Detach interface failed, port_id=3ee36563-83e1-498a-a5a3-81a8ff2ee417, reason: Instance a44cca2f-9286-490a-9013-1fea30984fa5 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1103.563412] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655083, 'name': PowerOffVM_Task, 'duration_secs': 0.223981} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.563688] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1103.563870] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance '514e0f15-f27d-4fab-9107-b92884075420' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1103.652017] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. 
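The req-67db5235 entries show the network-vif-deleted handler dropping port 3ee36563-83e1-498a-a5a3-81a8ff2ee417 from the instance's cached network_info, which is why the cache is then logged as []. A stand-alone sketch of that filtering step, treating the cache as the plain list-of-dicts structure shown earlier in this log (Nova's real info cache is an object model, so this is illustrative only):

    deleted_port_id = "3ee36563-83e1-498a-a5a3-81a8ff2ee417"  # from the event above

    # One trimmed-down cached VIF entry; the full shape appears earlier in the log.
    cached_network_info = [
        {"id": "3ee36563-83e1-498a-a5a3-81a8ff2ee417", "type": "ovs", "active": True},
    ]

    cached_network_info = [vif for vif in cached_network_info
                           if vif["id"] != deleted_port_id]
    print(cached_network_info)  # [] -- matching "network_info: []" logged above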
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.757704] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 1c7fabf7-ba82-4628-9016-b0f198add99a] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1103.939836] env[62974]: INFO nova.compute.manager [-] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Took 1.72 seconds to deallocate network for instance. [ 1104.070242] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1104.070540] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.070719] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.070957] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.071145] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.071310] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1104.071542] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1104.071713] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 
tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1104.071893] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1104.072073] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1104.072259] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1104.077424] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0403c140-af36-4db1-9b3e-63283c60a49f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.093133] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1104.093133] env[62974]: value = "task-2655084" [ 1104.093133] env[62974]: _type = "Task" [ 1104.093133] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.101578] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655084, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.151455] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. 
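The hardware.py lines above walk through the CPU topology search for a 1-vCPU flavor with effectively unbounded limits (65536 per dimension), which yields exactly one candidate, 1:1:1. A simplified stand-in for that enumeration (not nova.virt.hardware's actual code) that reproduces the same result:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Yield every (sockets, cores, threads) split whose product equals the
        # vCPU count and which respects the per-dimension maxima.
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- "Got 1 possible topologies"
    print(list(possible_topologies(4)))  # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), (4, 1, 1), ...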
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.262307] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d6ce3f68-a757-48bc-abeb-49c3aacdf465] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1104.446241] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.446564] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.446795] env[62974]: DEBUG nova.objects.instance [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lazy-loading 'resources' on Instance uuid a44cca2f-9286-490a-9013-1fea30984fa5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.603522] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655084, 'name': ReconfigVM_Task, 'duration_secs': 0.361761} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.603822] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance '514e0f15-f27d-4fab-9107-b92884075420' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1104.653023] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.765934] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 5d6a072e-dba7-461d-9d41-8ca003b31102] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1105.103395] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4d1a49-b101-4ddc-acf3-1f516dea8617 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.109634] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1105.109874] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.110080] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1105.110306] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.110489] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1105.110672] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1105.110924] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1105.111124] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1105.111325] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1105.111520] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1105.111726] env[62974]: DEBUG nova.virt.hardware [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1105.117677] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Reconfiguring VM instance instance-00000049 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1105.119873] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95b3c26a-39ce-4b44-9b29-9152f89bef63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.135559] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8008f174-5bfe-4de0-bba6-3609ff0e06b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.170183] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4c5851-5f69-44f5-99ae-ec4a7b7c38df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.172890] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1105.172890] env[62974]: value = "task-2655085" [ 1105.172890] env[62974]: _type = "Task" [ 1105.172890] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.182236] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.184459] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb9829f-0e4b-439e-bd1d-8670ac362121 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.191897] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.202991] env[62974]: DEBUG nova.compute.provider_tree [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.268932] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: af370de1-e4d7-4312-bc72-c6398eeaf2ed] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1105.365371] env[62974]: DEBUG nova.network.neutron [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Port daa4e7d6-34e5-4455-b28f-6ee056ef2e93 binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1105.365632] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.365787] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.365950] env[62974]: DEBUG nova.network.neutron [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1105.677549] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.685188] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655085, 'name': ReconfigVM_Task, 'duration_secs': 0.173147} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.685476] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Reconfigured VM instance instance-00000049 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1105.686230] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f26535-1aed-41ca-828c-bbe630360e72 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.707265] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 514e0f15-f27d-4fab-9107-b92884075420/514e0f15-f27d-4fab-9107-b92884075420.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.708120] env[62974]: DEBUG nova.scheduler.client.report [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.711071] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83913064-c10d-4e7c-af0d-fb26df33bc5e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.728808] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1105.728808] env[62974]: value = "task-2655086" [ 1105.728808] env[62974]: _type = "Task" [ 1105.728808] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.736788] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655086, 'name': ReconfigVM_Task} progress is 5%. 
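The inventory reported above for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 is enough to recompute the node's effective capacity: Placement allows allocations up to (total - reserved) * allocation_ratio per resource class, with min_unit/max_unit/step_size constraining each individual allocation. Recomputing from the logged numbers:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0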
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.772952] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6cee3cf6-2105-40f7-b7f2-5bd38a01a08b] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1106.100909] env[62974]: DEBUG nova.network.neutron [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.179749] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.225385] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.238724] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655086, 'name': ReconfigVM_Task, 'duration_secs': 0.37606} completed successfully. 
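The network_info blob cached above for instance 220295bf-b021-4800-bc7e-a3dd311c747a is ordinary JSON once serialized, with the addressing details nested inside each VIF entry. A small sketch that pulls the usual fields out of one cached VIF, using a literal trimmed down from the entry logged above:

    vif = {
        "id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.5",
                         "floating_ips": [{"address": "10.180.180.208"}]}],
            }],
            "meta": {"mtu": 8950},
        },
        "details": {"segmentation_id": 674},
    }

    fixed = [ip["address"] for sn in vif["network"]["subnets"] for ip in sn["ips"]]
    floating = [fip["address"] for sn in vif["network"]["subnets"]
                for ip in sn["ips"] for fip in ip.get("floating_ips", [])]
    print(fixed, floating, vif["network"]["meta"]["mtu"], vif["details"]["segmentation_id"])
    # ['192.168.128.5'] ['10.180.180.208'] 8950 674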
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.239073] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 514e0f15-f27d-4fab-9107-b92884075420/514e0f15-f27d-4fab-9107-b92884075420.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.239505] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance '514e0f15-f27d-4fab-9107-b92884075420' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1106.245401] env[62974]: INFO nova.scheduler.client.report [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Deleted allocations for instance a44cca2f-9286-490a-9013-1fea30984fa5 [ 1106.276565] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 12c769fb-8c9e-4089-9563-232cfad89b21] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1106.603816] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.676987] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.746552] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd44d3b-3387-4d00-9e95-6a45b54a75ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.768091] env[62974]: DEBUG oslo_concurrency.lockutils [None req-05f7ab01-83b8-4c1f-9522-7928ba545b9d tempest-AttachVolumeTestJSON-1041344595 tempest-AttachVolumeTestJSON-1041344595-project-member] Lock "a44cca2f-9286-490a-9013-1fea30984fa5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.187s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.770056] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c18115-b624-4b0d-931b-1b8854b2f505 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.788087] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 6928b412-e8cb-42fb-bc47-dc8498f12ad1] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1106.790025] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance '514e0f15-f27d-4fab-9107-b92884075420' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1107.107999] env[62974]: DEBUG nova.compute.manager [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62974) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1107.176914] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.294229] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 3bcbcf35-294e-4d58-b002-cb84db4316d5] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1107.347850] env[62974]: DEBUG nova.network.neutron [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Port 9a104751-f775-4505-a6de-a82f22b2127c binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1107.678019] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.801521] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c38cddae-95b3-4f4a-bf3a-5f0bdde548a9] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1108.178971] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.208740] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.209062] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.304976] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 69fb00b3-6a41-4ef5-8876-6548cae31c07] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1108.368207] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.368379] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 
tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.368559] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.637984] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.638260] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.638471] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.639071] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.639260] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.641270] env[62974]: INFO nova.compute.manager [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Terminating instance [ 1108.678012] env[62974]: DEBUG oslo_vmware.api [None req-9f200046-c87a-4eba-9499-571b6cd6e370 
tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655081, 'name': ReconfigVM_Task, 'duration_secs': 5.761395} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.678274] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.678478] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfigured VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1108.711619] env[62974]: DEBUG nova.objects.instance [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'migration_context' on Instance uuid 220295bf-b021-4800-bc7e-a3dd311c747a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.808583] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c002aec9-4fdf-45c9-9ef6-d196c4891e19] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1108.923376] env[62974]: DEBUG nova.compute.manager [req-5432da6e-6748-4ec5-8796-2223af29c00e req-eb198b35-3e00-4f90-aa90-84af42df9a46 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-vif-deleted-30b29e6f-4b73-4bb9-9a84-6526189297a0 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1108.923580] env[62974]: INFO nova.compute.manager [req-5432da6e-6748-4ec5-8796-2223af29c00e req-eb198b35-3e00-4f90-aa90-84af42df9a46 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Neutron deleted interface 30b29e6f-4b73-4bb9-9a84-6526189297a0; detaching it from the instance and deleting it from the info cache [ 1108.923872] env[62974]: DEBUG nova.network.neutron [req-5432da6e-6748-4ec5-8796-2223af29c00e req-eb198b35-3e00-4f90-aa90-84af42df9a46 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", 
"external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ed04cd61-079c-4a14-8e69-0046a9247550", "address": "fa:16:3e:5b:0c:dc", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped04cd61-07", "ovs_interfaceid": "ed04cd61-079c-4a14-8e69-0046a9247550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.145080] env[62974]: DEBUG nova.compute.manager [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1109.145291] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.145573] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a49946d1-8707-4dd1-99f1-dcfee82830d7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.155119] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1109.155119] env[62974]: value = "task-2655088" [ 1109.155119] env[62974]: _type = "Task" [ 1109.155119] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.164115] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655088, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.313130] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 3426d512-d54e-4852-8eca-8ba9f5fef418] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1109.370566] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5758ad88-092d-498a-ba75-e389942b9295 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.381712] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df035586-8c9f-44ce-9dfa-584afadb696c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.427193] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.427386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.427566] env[62974]: DEBUG nova.network.neutron [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1109.429456] env[62974]: DEBUG oslo_concurrency.lockutils [req-5432da6e-6748-4ec5-8796-2223af29c00e req-eb198b35-3e00-4f90-aa90-84af42df9a46 service nova] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.429590] env[62974]: DEBUG oslo_concurrency.lockutils [req-5432da6e-6748-4ec5-8796-2223af29c00e req-eb198b35-3e00-4f90-aa90-84af42df9a46 service nova] Acquired lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.430357] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5031cba0-fdae-43f3-b514-feddbe73bfc3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.433973] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c5d62b-d6a8-4aa4-a9a1-b4938171d727 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.455801] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00269b65-8e70-4621-a412-a9cf17d5398f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.460046] env[62974]: DEBUG 
oslo_concurrency.lockutils [req-5432da6e-6748-4ec5-8796-2223af29c00e req-eb198b35-3e00-4f90-aa90-84af42df9a46 service nova] Releasing lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.460314] env[62974]: WARNING nova.compute.manager [req-5432da6e-6748-4ec5-8796-2223af29c00e req-eb198b35-3e00-4f90-aa90-84af42df9a46 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Detach interface failed, port_id=30b29e6f-4b73-4bb9-9a84-6526189297a0, reason: No device with interface-id 30b29e6f-4b73-4bb9-9a84-6526189297a0 exists on VM: nova.exception.NotFound: No device with interface-id 30b29e6f-4b73-4bb9-9a84-6526189297a0 exists on VM [ 1109.470486] env[62974]: DEBUG nova.compute.provider_tree [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.534304] env[62974]: DEBUG nova.compute.manager [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-vif-deleted-ed04cd61-079c-4a14-8e69-0046a9247550 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1109.534304] env[62974]: INFO nova.compute.manager [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Neutron deleted interface ed04cd61-079c-4a14-8e69-0046a9247550; detaching it from the instance and deleting it from the info cache [ 1109.534304] env[62974]: DEBUG nova.network.neutron [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.665605] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 
tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655088, 'name': PowerOffVM_Task, 'duration_secs': 0.204187} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.665882] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1109.666118] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1109.666336] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535476', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'name': 'volume-cf84a2af-6e27-461e-9af2-0471881dd540', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c', 'attached_at': '2025-02-19T03:59:34.000000', 'detached_at': '', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'serial': 'cf84a2af-6e27-461e-9af2-0471881dd540'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1109.667099] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418cad58-b64b-47d3-81ef-d049f2a7f9a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.686577] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbf18e9-cf6d-432f-a3bc-6e7f8c5efafc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.695370] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b027a9fb-cf95-431d-8f1d-8862ac2c9f6d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.713127] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ac0f68-bf90-461a-8952-c6a5770434f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.727247] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] The volume has not been displaced from its original location: [datastore2] volume-cf84a2af-6e27-461e-9af2-0471881dd540/volume-cf84a2af-6e27-461e-9af2-0471881dd540.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1109.732409] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1109.732648] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4b87b42-c53b-4116-9204-ba24349f2f7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.750258] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1109.750258] env[62974]: value = "task-2655089" [ 1109.750258] env[62974]: _type = "Task" [ 1109.750258] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.759990] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655089, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.815841] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 0c2642d5-85fe-4db5-9891-025c88ca8c7c] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1109.934615] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.934758] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.934934] env[62974]: DEBUG nova.network.neutron [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1109.973386] env[62974]: DEBUG nova.scheduler.client.report [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.037486] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.037669] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Acquired lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.038608] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a6544b-ee8b-4d71-b48d-61be37b681d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.059844] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435cc52c-2246-4f3c-946f-3062a238d91d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.086736] env[62974]: DEBUG nova.virt.vmwareapi.vmops [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfiguring VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1110.087147] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.087456] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fee84ef-3a37-4f16-8061-836405ee7498 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.109534] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Waiting for the task: (returnval){ [ 1110.109534] env[62974]: value = "task-2655090" [ 1110.109534] env[62974]: _type = "Task" [ 1110.109534] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.117038] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.168786] env[62974]: DEBUG nova.network.neutron [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.260306] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655089, 'name': ReconfigVM_Task, 'duration_secs': 0.159852} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.260588] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1110.265584] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bb7427e-ce54-4228-ac96-748bce70f845 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.287529] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1110.287529] env[62974]: value = "task-2655091" [ 1110.287529] env[62974]: _type = "Task" [ 1110.287529] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.299634] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655091, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.319250] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 8bd478ab-a101-4d6a-9e7c-bfde0fce81c7] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1110.621662] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.644508] env[62974]: DEBUG nova.network.neutron [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [{"id": "4d417e22-6d84-4f85-9504-ae36562bc03e", "address": "fa:16:3e:4e:29:82", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d417e22-6d", "ovs_interfaceid": "4d417e22-6d84-4f85-9504-ae36562bc03e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.671360] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.797496] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655091, 'name': ReconfigVM_Task, 'duration_secs': 0.149249} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.797804] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535476', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'name': 'volume-cf84a2af-6e27-461e-9af2-0471881dd540', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c3801f86-5aaa-42cd-a6b2-1b72b77aa74c', 'attached_at': '2025-02-19T03:59:34.000000', 'detached_at': '', 'volume_id': 'cf84a2af-6e27-461e-9af2-0471881dd540', 'serial': 'cf84a2af-6e27-461e-9af2-0471881dd540'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1110.798090] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1110.798841] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d75f1cb-cc1f-41be-8803-f852499a96cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.805358] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1110.805578] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1d3d56c-f00b-4f59-bdc7-25bd0f3a57e4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.822666] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: a14e7e40-afef-4607-8fa9-935a92ea49dc] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1110.874020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1110.874020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1110.874020] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleting the datastore file [datastore2] 
c3801f86-5aaa-42cd-a6b2-1b72b77aa74c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1110.874020] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d11c36b-dd89-424c-8a49-45610cc8f5d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.880694] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1110.880694] env[62974]: value = "task-2655094" [ 1110.880694] env[62974]: _type = "Task" [ 1110.880694] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.889474] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.985091] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.775s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.124611] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.147369] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.199222] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e27d22-d7a9-4054-b52b-3d9169f635fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.218518] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f4dfb0-6ad3-4c95-81b6-f30f4525150b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.225905] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance '514e0f15-f27d-4fab-9107-b92884075420' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1111.325595] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: da43a464-ebae-4038-9f7b-330df22d8d7c] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1111.390716] env[62974]: DEBUG oslo_vmware.api [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102188} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.391016] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1111.391267] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1111.391445] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1111.391615] env[62974]: INFO nova.compute.manager [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Took 2.25 seconds to destroy the instance on the hypervisor. 
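The entries above repeat a pattern that runs through the whole trace: a vCenter task is created (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task), then polled with "progress is N%" lines until a "completed successfully" entry carrying a duration_secs value, after which the caller proceeds (here, the instance is reported destroyed). Below is a minimal, self-contained sketch of that poll-until-complete loop. It is illustrative only, not the oslo.vmware implementation; `fetch_task_info` and the state/progress dictionary it returns are hypothetical stand-ins for however task status is retrieved.

```python
import time

# Polling interval between status checks, analogous in spirit to a
# task_poll_interval setting (value chosen arbitrarily for the sketch).
POLL_INTERVAL = 0.5


def wait_for_task(fetch_task_info, task_id):
    """Poll a task until it reaches a terminal state, logging progress.

    `fetch_task_info` is a hypothetical callable that returns a dict such as
    {"state": "running", "progress": 14} or {"state": "success"} for the
    given task id; "success" and "error" are treated as terminal states.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        state = info.get("state")
        if state == "success":
            duration = time.monotonic() - start
            # Mirrors the "'duration_secs': ...} completed successfully" lines.
            print(f"Task {task_id} completed successfully in {duration:.3f}s")
            return info
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Mirrors the repeated "progress is N%" DEBUG lines in the log.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)


# Example usage with a stub that completes immediately:
# wait_for_task(lambda tid: {"state": "success"}, "task-0000001")
```

The same shape explains why identical "ReconfigVM_Task} progress is 14%" lines recur for minutes in the log: each poll that does not observe a terminal state simply logs the current progress and sleeps before the next check.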
[ 1111.391848] env[62974]: DEBUG oslo.service.loopingcall [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1111.392124] env[62974]: DEBUG nova.compute.manager [-] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1111.392244] env[62974]: DEBUG nova.network.neutron [-] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1111.622554] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.651558] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9f200046-c87a-4eba-9499-571b6cd6e370 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-72b0b643-7747-4dae-9d85-c8c6a573ce07-30b29e6f-4b73-4bb9-9a84-6526189297a0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.584s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.734020] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.734020] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbd4be39-5b11-4be4-98a8-c305a44981d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.740125] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1111.740125] env[62974]: value = "task-2655095" [ 1111.740125] env[62974]: _type = "Task" [ 1111.740125] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.747768] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655095, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.824534] env[62974]: DEBUG nova.compute.manager [req-68510baf-c087-4a26-8c7e-76dd1db5a10f req-a2b2cc89-29a2-4e31-a29b-c441cd94b98d service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Received event network-vif-deleted-5d45e949-a386-4bc2-a1a4-e9232bcaeeba {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1111.824534] env[62974]: INFO nova.compute.manager [req-68510baf-c087-4a26-8c7e-76dd1db5a10f req-a2b2cc89-29a2-4e31-a29b-c441cd94b98d service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Neutron deleted interface 5d45e949-a386-4bc2-a1a4-e9232bcaeeba; detaching it from the instance and deleting it from the info cache [ 1111.824534] env[62974]: DEBUG nova.network.neutron [req-68510baf-c087-4a26-8c7e-76dd1db5a10f req-a2b2cc89-29a2-4e31-a29b-c441cd94b98d service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.831808] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 8621428e-cf42-47a4-82c8-a003c377b257] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1112.122539] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.253932] env[62974]: DEBUG oslo_vmware.api [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655095, 'name': PowerOnVM_Task, 'duration_secs': 0.416035} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.254384] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.254673] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a839d056-fea4-4f16-be1f-4b49c49e2841 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance '514e0f15-f27d-4fab-9107-b92884075420' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1112.306126] env[62974]: DEBUG nova.network.neutron [-] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.326688] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62c033a3-ab75-49b3-858b-cb9e20364241 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.336210] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6368802-7f9e-4d5a-be9a-a3702feda51f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.347511] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c763d45b-44f0-4557-a726-7aad2bc58ba8] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1112.369074] env[62974]: DEBUG nova.compute.manager [req-68510baf-c087-4a26-8c7e-76dd1db5a10f req-a2b2cc89-29a2-4e31-a29b-c441cd94b98d service nova] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Detach interface failed, port_id=5d45e949-a386-4bc2-a1a4-e9232bcaeeba, reason: Instance c3801f86-5aaa-42cd-a6b2-1b72b77aa74c could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1112.523594] env[62974]: INFO nova.compute.manager [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Swapping old allocation on dict_keys(['bd3bd9ae-180c-41cf-831e-3dd3892efa18']) held by migration 1a61954c-c534-4854-94df-272a36bdfb72 for instance [ 1112.545762] env[62974]: DEBUG nova.scheduler.client.report [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Overwriting current allocation {'allocations': {'bd3bd9ae-180c-41cf-831e-3dd3892efa18': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 151}}, 'project_id': '5ecf0c1b56e34a6cbc2d073089e37efc', 'user_id': '7337dc651b624b41a4dae92e0603c534', 'consumer_generation': 1} on consumer 220295bf-b021-4800-bc7e-a3dd311c747a {{(pid=62974) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1112.624514] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.637834] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.638034] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.638215] env[62974]: DEBUG nova.network.neutron [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.808211] env[62974]: INFO nova.compute.manager [-] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Took 1.42 seconds to deallocate network for instance. [ 1112.851696] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 59ece0e8-85c2-499d-aba2-fd45fc116013] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1113.124230] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.342970] env[62974]: DEBUG nova.network.neutron [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [{"id": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "address": "fa:16:3e:1f:15:30", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaa4e7d6-34", "ovs_interfaceid": "daa4e7d6-34e5-4455-b28f-6ee056ef2e93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.354920] env[62974]: INFO nova.compute.manager [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Took 0.55 seconds to detach 1 volumes for instance. [ 1113.356555] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 366b5816-a847-48d1-ad03-5758e473a9d0] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1113.358747] env[62974]: DEBUG nova.compute.manager [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Deleting volume: cf84a2af-6e27-461e-9af2-0471881dd540 {{(pid=62974) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1113.625429] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.845469] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-220295bf-b021-4800-bc7e-a3dd311c747a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.846473] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e97618-2c1e-4e03-beb2-229dde00ca79 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.853917] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0504b041-b682-4120-92da-48f1f06a39f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.861679] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 85f8f79d-330a-49cd-b1ae-8de20c70fcab] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1113.903299] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.903299] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.903299] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.927939] env[62974]: INFO nova.scheduler.client.report [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted allocations for instance c3801f86-5aaa-42cd-a6b2-1b72b77aa74c [ 1114.125265] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.365215] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b3827c67-9075-4a53-9f9e-8651e3f4b211] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1114.437951] env[62974]: DEBUG oslo_concurrency.lockutils [None req-246c2131-ce9b-4ce0-b526-a50626987075 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "c3801f86-5aaa-42cd-a6b2-1b72b77aa74c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.800s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.626382] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.849383] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.850333] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.850590] env[62974]: DEBUG nova.compute.manager [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Going to confirm migration 9 {{(pid=62974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1114.867932] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d8b7a39f-ec73-4a87-9b1e-9428ca72f895] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1114.939848] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.940183] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37e05bd9-45da-480a-92e6-042e7c0ee45c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.947615] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: 
(returnval){ [ 1114.947615] env[62974]: value = "task-2655098" [ 1114.947615] env[62974]: _type = "Task" [ 1114.947615] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.964515] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.127107] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.409620] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.409808] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.410062] env[62974]: DEBUG nova.network.neutron [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.410312] env[62974]: DEBUG nova.objects.instance [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'info_cache' on Instance uuid 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.460127] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.460235] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.460433] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock 
"11bd6a5d-9590-4aa3-aaf3-99d2ac394553-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.460612] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.460776] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.462915] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655098, 'name': PowerOffVM_Task, 'duration_secs': 0.254841} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.463830] env[62974]: INFO nova.compute.manager [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Terminating instance [ 1115.464646] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.465276] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1115.465490] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1115.465643] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 
tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1115.465819] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1115.465960] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1115.466151] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1115.466388] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1115.466555] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1115.466720] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1115.466878] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1115.467062] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1115.473868] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac893ef3-d9ef-4abb-9f5a-0883376b10ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.491258] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1115.491258] env[62974]: value = "task-2655099" [ 1115.491258] env[62974]: _type 
= "Task" [ 1115.491258] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.499520] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655099, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.627996] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.862287] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.862534] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.985992] env[62974]: DEBUG nova.compute.manager [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1115.986343] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.987747] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f6ebc5-280f-44ed-b461-2408e1b1592a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.996518] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1115.997062] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e8e0f7b-2a94-46b2-8db7-4c8b1957a3de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.001787] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655099, 'name': ReconfigVM_Task, 'duration_secs': 0.183008} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.002915] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332bdf86-42de-4a2e-b533-e82c1f877d92 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.006950] env[62974]: DEBUG oslo_vmware.api [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1116.006950] env[62974]: value = "task-2655100" [ 1116.006950] env[62974]: _type = "Task" [ 1116.006950] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.027346] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1116.027590] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.027746] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1116.027927] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.028083] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1116.028233] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1116.028434] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 
tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1116.028589] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1116.028748] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1116.028915] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1116.029106] env[62974]: DEBUG nova.virt.hardware [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1116.030321] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbef2f59-3b4a-4a5c-90e3-73b54ae194b0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.036345] env[62974]: DEBUG oslo_vmware.api [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.040029] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1116.040029] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a3df79-9c14-a5d2-58e5-f5f9984d1045" [ 1116.040029] env[62974]: _type = "Task" [ 1116.040029] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.049831] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a3df79-9c14-a5d2-58e5-f5f9984d1045, 'name': SearchDatastore_Task, 'duration_secs': 0.007258} completed successfully. 
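
The nova.virt.hardware records above ('Build topologies for 1 vcpu(s) 1:1:1', 'Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]') show the driver enumerating socket/core/thread splits for the m1.nano flavor's single vCPU under the default 65536 limits. The toy enumeration below illustrates why only (1, 1, 1) comes back for one vCPU; it is an illustrative stand-in, not Nova's actual topology code:

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus
    and that stay within the given limits."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)], the single topology reported in the log
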
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.056978] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1116.057641] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70d3b642-9799-4168-8981-f6108e054f03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.077920] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1116.077920] env[62974]: value = "task-2655101" [ 1116.077920] env[62974]: _type = "Task" [ 1116.077920] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.086239] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655101, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.128450] env[62974]: DEBUG oslo_vmware.api [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Task: {'id': task-2655090, 'name': ReconfigVM_Task, 'duration_secs': 5.77711} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.128761] env[62974]: DEBUG oslo_concurrency.lockutils [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] Releasing lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.129081] env[62974]: DEBUG nova.virt.vmwareapi.vmops [req-3fb2f990-0a8e-492a-96b6-d6363dd02a86 req-298b11ba-b629-42f9-b0d5-5b7860d503b4 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Reconfigured VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1116.129538] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.042s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.129752] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.129949] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.130130] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.132168] env[62974]: INFO nova.compute.manager [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Terminating instance [ 1116.367966] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.368199] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1116.520372] env[62974]: DEBUG oslo_vmware.api [None req-c00bf3b7-a47b-4759-9747-69d02378503f 
tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655100, 'name': PowerOffVM_Task, 'duration_secs': 0.233006} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.520643] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.520812] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1116.521163] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31983927-4059-4b33-bbf9-9da00d9ce583 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.588387] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655101, 'name': ReconfigVM_Task, 'duration_secs': 0.223928} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.589760] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1116.590073] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1116.590262] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1116.590435] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleting the datastore file [datastore1] 11bd6a5d-9590-4aa3-aaf3-99d2ac394553 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1116.591234] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a353dddb-dea6-474d-98e7-8bbfb54005d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.594127] env[62974]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30a93b17-9635-4cb9-b641-eb84c99ae1f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.618367] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.622384] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f211df40-1c8d-47cc-a812-46d1db504cf9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.635272] env[62974]: DEBUG oslo_vmware.api [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1116.635272] env[62974]: value = "task-2655103" [ 1116.635272] env[62974]: _type = "Task" [ 1116.635272] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.636275] env[62974]: DEBUG nova.compute.manager [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1116.636473] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1116.642928] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b2998d-b7b4-488f-8ae8-4b0db1340acf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.644611] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1116.644611] env[62974]: value = "task-2655104" [ 1116.644611] env[62974]: _type = "Task" [ 1116.644611] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.653427] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.653544] env[62974]: DEBUG oslo_vmware.api [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.654057] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13da9fcb-c70b-46f0-99c9-986c45dc0a8c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.658343] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655104, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.663043] env[62974]: DEBUG oslo_vmware.api [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1116.663043] env[62974]: value = "task-2655105" [ 1116.663043] env[62974]: _type = "Task" [ 1116.663043] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.670882] env[62974]: DEBUG oslo_vmware.api [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655105, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.749461] env[62974]: DEBUG nova.network.neutron [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.901552] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.145842] env[62974]: DEBUG oslo_vmware.api [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166328} completed successfully. 
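
The 'Updating instance_info_cache with network_info' records carry the full serialized VIF data for an instance: port id, MAC, fixed IP, floating IPs, MTU, and the OVS/NSX binding details. Treated as plain JSON, that structure can be reduced to the per-port addresses; the field names below are exactly the ones visible in the cached entry for instance 514e0f15 above:

def summarize_network_info(network_info):
    """Return (port_id, mac, fixed_ip, floating_ips, mtu) tuples from a cached
    network_info list."""
    rows = []
    for vif in network_info:
        net = vif["network"]
        mtu = net.get("meta", {}).get("mtu")
        for subnet in net.get("subnets", []):
            for ip in subnet.get("ips", []):
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                rows.append((vif["id"], vif["address"], ip["address"], floating, mtu))
    return rows

# For the entry above this yields:
# [('9a104751-f775-4505-a6de-a82f22b2127c', 'fa:16:3e:97:64:e7',
#   '192.168.128.5', ['10.180.180.231'], 8950)]
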
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.150270] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1117.150471] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1117.150638] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1117.150805] env[62974]: INFO nova.compute.manager [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1117.151044] env[62974]: DEBUG oslo.service.loopingcall [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1117.151287] env[62974]: DEBUG nova.compute.manager [-] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1117.151393] env[62974]: DEBUG nova.network.neutron [-] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1117.157808] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655104, 'name': ReconfigVM_Task, 'duration_secs': 0.351073} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.158392] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a/220295bf-b021-4800-bc7e-a3dd311c747a.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1117.158898] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26e46da-fc50-49e9-b3d0-d5b48f164362 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.184556] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaa6b18-8549-4ab3-b08f-bf8add695f2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.193431] env[62974]: DEBUG oslo_vmware.api [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655105, 'name': PowerOffVM_Task, 'duration_secs': 0.225624} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.210610] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1117.211138] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1117.211254] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8478173-4ada-4da1-8f7a-2594336509ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.213619] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1f05b4-1392-4f52-b2ac-ec9f97fec18d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.239016] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaab053b-c80c-45ec-9a6d-326f8a4732b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.246238] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1117.246517] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb09f4aa-e4aa-4706-9130-07cdf775d07b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.251617] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1117.251617] env[62974]: value = "task-2655107" [ 1117.251617] env[62974]: _type = "Task" [ 1117.251617] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.255096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.255404] env[62974]: DEBUG nova.objects.instance [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'migration_context' on Instance uuid 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.257043] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.257377] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1117.264648] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655107, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.289032] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1117.289365] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1117.289472] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleting the datastore file [datastore1] 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1117.289700] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d5b0e89-67ea-415f-82f2-5d5e1f22f3c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.296163] env[62974]: DEBUG oslo_vmware.api [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1117.296163] env[62974]: value = "task-2655108" [ 1117.296163] env[62974]: _type = "Task" [ 1117.296163] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.305291] env[62974]: DEBUG oslo_vmware.api [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655108, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.451897] env[62974]: DEBUG nova.compute.manager [req-04d9dc05-e740-48b3-8361-2d1f93d00695 req-5fe74928-bd8f-426a-a794-a3fce5ecabf0 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Received event network-vif-deleted-6d33ceb1-e623-4a85-bed3-a9dba877fc7b {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1117.452126] env[62974]: INFO nova.compute.manager [req-04d9dc05-e740-48b3-8361-2d1f93d00695 req-5fe74928-bd8f-426a-a794-a3fce5ecabf0 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Neutron deleted interface 6d33ceb1-e623-4a85-bed3-a9dba877fc7b; detaching it from the instance and deleting it from the info cache [ 1117.452305] env[62974]: DEBUG nova.network.neutron [req-04d9dc05-e740-48b3-8361-2d1f93d00695 req-5fe74928-bd8f-426a-a794-a3fce5ecabf0 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.758657] env[62974]: DEBUG nova.objects.base [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Object Instance<514e0f15-f27d-4fab-9107-b92884075420> lazy-loaded attributes: info_cache,migration_context {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1117.764478] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645524a5-7cd0-4e0c-a556-7b3f2f6418e2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.766874] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655107, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.784049] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a992b56a-17d0-4e54-97d6-d27c0a0d0df8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.789314] env[62974]: DEBUG oslo_vmware.api [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1117.789314] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2a4cc-4aab-b927-b20d-1eb76c899154" [ 1117.789314] env[62974]: _type = "Task" [ 1117.789314] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.797512] env[62974]: DEBUG oslo_vmware.api [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2a4cc-4aab-b927-b20d-1eb76c899154, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.806727] env[62974]: DEBUG oslo_vmware.api [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192755} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.807619] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1117.807812] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1117.807990] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1117.808199] env[62974]: INFO nova.compute.manager [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1117.808422] env[62974]: DEBUG oslo.service.loopingcall [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1117.808612] env[62974]: DEBUG nova.compute.manager [-] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1117.808706] env[62974]: DEBUG nova.network.neutron [-] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1117.935840] env[62974]: DEBUG nova.network.neutron [-] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.958973] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bac168e0-89bd-465a-a0a6-c60da40fdbe3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.970023] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6705bd00-03e4-443c-b654-c7cfae7fc143 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.995036] env[62974]: DEBUG nova.compute.manager [req-04d9dc05-e740-48b3-8361-2d1f93d00695 req-5fe74928-bd8f-426a-a794-a3fce5ecabf0 service nova] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Detach interface failed, port_id=6d33ceb1-e623-4a85-bed3-a9dba877fc7b, reason: Instance 11bd6a5d-9590-4aa3-aaf3-99d2ac394553 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1118.262604] env[62974]: DEBUG oslo_vmware.api [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655107, 'name': PowerOnVM_Task, 'duration_secs': 0.62535} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.262916] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.299252] env[62974]: DEBUG oslo_vmware.api [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2a4cc-4aab-b927-b20d-1eb76c899154, 'name': SearchDatastore_Task, 'duration_secs': 0.009062} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.299566] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.299785] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.437570] env[62974]: INFO nova.compute.manager [-] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Took 1.29 seconds to deallocate network for instance. [ 1118.511273] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [{"id": "9a104751-f775-4505-a6de-a82f22b2127c", "address": "fa:16:3e:97:64:e7", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a104751-f7", "ovs_interfaceid": "9a104751-f775-4505-a6de-a82f22b2127c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.715515] env[62974]: DEBUG nova.network.neutron [-] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.935057] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5d5139-0896-4496-b4ea-a1b3bd4c3f14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.942390] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a43e3e-b17a-4652-9317-3a62f0a8d85b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.947900] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.972233] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa6c110-a8bf-4d6f-9c7d-de0287c5852c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.978994] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bf34bc-9541-4d46-8ddc-f4ad0a21a61b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.991562] env[62974]: DEBUG nova.compute.provider_tree [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.014025] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-514e0f15-f27d-4fab-9107-b92884075420" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.014025] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1119.014025] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.014193] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.014362] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.014509] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.014648] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task 
ComputeManager._sync_power_states {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.218135] env[62974]: INFO nova.compute.manager [-] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Took 1.41 seconds to deallocate network for instance. [ 1119.308546] env[62974]: INFO nova.compute.manager [None req-5bbdac3b-6de7-48e5-a76c-5282a3e052e6 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance to original state: 'active' [ 1119.478713] env[62974]: DEBUG nova.compute.manager [req-723b3903-7dd4-4d10-bc4d-1744fb4af7af req-a5fd1d78-f5b7-42ae-a19e-7e99d45908b3 service nova] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Received event network-vif-deleted-4d417e22-6d84-4f85-9504-ae36562bc03e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1119.514729] env[62974]: ERROR nova.scheduler.client.report [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [req-7af11d66-d03a-4f2e-aab2-48a0efc937d9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bd3bd9ae-180c-41cf-831e-3dd3892efa18. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7af11d66-d03a-4f2e-aab2-48a0efc937d9"}]} [ 1119.519012] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Getting list of instances from cluster (obj){ [ 1119.519012] env[62974]: value = "domain-c8" [ 1119.519012] env[62974]: _type = "ClusterComputeResource" [ 1119.519012] env[62974]: } {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1119.520082] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb85eec7-dbc8-4296-a398-082c1832440e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.536841] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Got total of 6 instances {{(pid=62974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1119.536841] env[62974]: WARNING nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] While synchronizing instance power states, found 8 instances in the database and 6 instances on the hypervisor. 
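The 409 "placement.concurrent_update" error above means the inventory PUT carried a stale resource provider generation; the report client reacts by refreshing the provider (the "Refreshing inventories ..." entries that follow) and retrying the write. A minimal sketch of that optimistic-concurrency loop against the Placement API follows; it uses plain requests with a hypothetical endpoint, token, and microversion header rather than Nova's actual report client, and omits real auth and error handling.

    import requests

    def put_inventories(base_url, token, rp_uuid, inventories, retries=3):
        # Hypothetical endpoint/token handling; real clients use keystoneauth
        # sessions and negotiate the placement microversion.
        url = f"{base_url}/resource_providers/{rp_uuid}/inventories"
        headers = {"X-Auth-Token": token,
                   "OpenStack-API-Version": "placement 1.28"}
        resp = None
        for _ in range(retries):
            # Fetch the current provider generation before writing.
            gen = requests.get(url, headers=headers).json()[
                "resource_provider_generation"]
            resp = requests.put(url, headers=headers, json={
                "resource_provider_generation": gen,
                "inventories": inventories})
            if resp.status_code != 409:
                return resp
            # 409 placement.concurrent_update: another writer bumped the
            # generation between our GET and PUT, so refresh and retry.
        return resp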
[ 1119.536841] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid cf73422d-7f4b-4bae-9d69-de74d7211243 {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.536841] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.536841] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.536841] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid 11bd6a5d-9590-4aa3-aaf3-99d2ac394553 {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.537190] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid 4de11643-da0a-453f-b03e-ca19819f4f06 {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.537190] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid 220295bf-b021-4800-bc7e-a3dd311c747a {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.537250] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.537388] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Triggering sync for uuid 2a962aab-3057-43df-97f7-b63ce808fb90 {{(pid=62974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1119.538433] env[62974]: DEBUG nova.scheduler.client.report [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1119.540331] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "cf73422d-7f4b-4bae-9d69-de74d7211243" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.540544] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.540806] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.541016] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.541225] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.541460] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.541664] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.541856] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.542254] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.542336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.542481] env[62974]: INFO nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] During sync_power_state the instance has a pending task (resize_reverting). Skip. 
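The _sync_power_states entries above follow a simple pattern: compare the set of instances the database knows about with what the hypervisor reports (hence the earlier "found 8 instances in the database and 6 instances on the hypervisor" warning), then trigger a per-UUID sync that is serialized on a named oslo.concurrency lock so it cannot race with an in-flight operation such as the resize revert that was skipped. A rough sketch of that pattern, with placeholder helpers rather than Nova's internals:

    from oslo_concurrency import lockutils

    def sync_power_states(db_instances, hypervisor_uuids, query_and_sync):
        db_uuids = {inst["uuid"] for inst in db_instances}
        if len(db_uuids) != len(hypervisor_uuids):
            print("WARNING: %d instances in the database, %d on the hypervisor"
                  % (len(db_uuids), len(hypervisor_uuids)))
        for uuid in db_uuids:
            # One lock per instance UUID, mirroring the acquire/release pairs
            # logged above; a skip simply releases the lock without syncing.
            with lockutils.lock(uuid):
                query_and_sync(uuid)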
[ 1119.542639] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1119.542819] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.543078] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "2a962aab-3057-43df-97f7-b63ce808fb90" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.543256] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "2a962aab-3057-43df-97f7-b63ce808fb90" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.543520] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.543662] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1119.544568] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e64b4f-957b-4e20-a9a9-a3ffbe554ee3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.547731] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac8fa5c-770a-4eff-be12-a49c5f72de99 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.550385] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29e4174-d13d-4cce-8de7-90ef325bc150 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.553288] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea4dd90-109a-4203-995a-f5101a51d969 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.555525] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.557299] env[62974]: DEBUG nova.scheduler.client.report [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1119.557502] env[62974]: DEBUG nova.compute.provider_tree [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.573467] env[62974]: DEBUG nova.scheduler.client.report [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1119.593419] env[62974]: DEBUG nova.scheduler.client.report [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 
tempest-ServerActionsTestJSON-599772511-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1119.699440] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f6bbdd-1c4f-4661-83c8-73e635ca6124 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.707058] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2f4447-657d-4fcd-ab7b-e79d69f5c57c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.740347] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.741698] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6d8388-3952-4044-baf9-e7ce54e7a1e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.749397] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7c9a88-fb86-4f67-90cd-2e799980a80a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.763690] env[62974]: DEBUG nova.compute.provider_tree [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.061299] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.074186] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.533s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.074628] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.534s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.076111] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.534s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.076414] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "2a962aab-3057-43df-97f7-b63ce808fb90" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.533s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.280541] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.280790] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.281011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.281234] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.281405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.283496] env[62974]: INFO nova.compute.manager [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 
220295bf-b021-4800-bc7e-a3dd311c747a] Terminating instance [ 1120.294018] env[62974]: DEBUG nova.scheduler.client.report [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updated inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1120.294290] env[62974]: DEBUG nova.compute.provider_tree [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 generation from 152 to 153 during operation: update_inventory {{(pid=62974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1120.294475] env[62974]: DEBUG nova.compute.provider_tree [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.787499] env[62974]: DEBUG nova.compute.manager [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1120.787945] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1120.788038] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f2d91e9-f6af-48d4-b2a1-b3a86a5c217f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.794654] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1120.794654] env[62974]: value = "task-2655109" [ 1120.794654] env[62974]: _type = "Task" [ 1120.794654] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.805238] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.305215] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655109, 'name': PowerOffVM_Task, 'duration_secs': 0.415814} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.305501] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.305673] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1121.305861] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535489', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'name': 'volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '220295bf-b021-4800-bc7e-a3dd311c747a', 'attached_at': '2025-02-19T04:00:16.000000', 'detached_at': '', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'serial': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1121.307054] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.007s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.310092] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12d79ec-f941-46ea-95c2-9264d4a0d96b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.312765] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 
2.365s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.312977] env[62974]: DEBUG nova.objects.instance [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'resources' on Instance uuid 11bd6a5d-9590-4aa3-aaf3-99d2ac394553 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.333816] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dc3613-48a2-46c4-ad54-c8c1800d36eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.341255] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1673ba-8be1-4884-802c-d05537e97e0c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.998542] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c644c366-27c5-4b61-bb40-8fae670217fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.013962] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] The volume has not been displaced from its original location: [datastore2] volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb/volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb.vmdk. No consolidation needed. {{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1122.018961] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1122.023117] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d074214-249e-4de8-934e-a110113ed34f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.040799] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1122.040799] env[62974]: value = "task-2655110" [ 1122.040799] env[62974]: _type = "Task" [ 1122.040799] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.048195] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655110, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.077852] env[62974]: INFO nova.scheduler.client.report [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted allocation for migration b47d67b1-b862-4c01-9dc5-efc0452e1e77 [ 1122.146490] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1dcb2b2-2433-452c-bc6c-fad365e6c0dd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.153539] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaad4ea9-8f29-45ab-a696-013d5e880d48 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.183808] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35222e56-98e8-41cb-b888-ea34108066a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.190830] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6fca065-4f4b-49a9-bcca-ca1853d6e925 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.205282] env[62974]: DEBUG nova.compute.provider_tree [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.552078] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655110, 'name': ReconfigVM_Task, 'duration_secs': 0.211163} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.552407] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1122.557549] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f5520e9-ad60-48b6-96d2-295c0591bba0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.572563] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1122.572563] env[62974]: value = "task-2655111" [ 1122.572563] env[62974]: _type = "Task" [ 1122.572563] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.580326] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655111, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.584802] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3b3c557d-ae74-4bc7-a6d7-12917fa9afb1 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.735s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.585571] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "514e0f15-f27d-4fab-9107-b92884075420" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.045s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.586496] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9fa8e3-8494-4195-ae94-5f477ea0f9f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.707972] env[62974]: DEBUG nova.scheduler.client.report [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.944248] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.082033] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655111, 'name': ReconfigVM_Task, 'duration_secs': 0.131621} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.082344] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535489', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'name': 'volume-e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '220295bf-b021-4800-bc7e-a3dd311c747a', 'attached_at': '2025-02-19T04:00:16.000000', 'detached_at': '', 'volume_id': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb', 'serial': 'e8b7a0d1-e9c7-4352-9452-ec4d12c598bb'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1123.082593] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1123.083342] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d502c8-b8a5-4e68-9b87-bb6604909cd7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.089695] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1123.090291] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51bc2c91-163d-4381-9ed9-d78449500346 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.094857] env[62974]: INFO nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 514e0f15-f27d-4fab-9107-b92884075420] During sync_power_state the instance has a pending task (deleting). Skip. 
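The teardown path running through these entries (power off the VM, unregister it, then delete its datastore directory via FileManager.DeleteDatastoreFile_Task, each long-running step followed by a task wait) maps onto a small amount of oslo.vmware session code. The sketch below assumes placeholder vCenter credentials and pre-resolved managed object references (vm_ref, datacenter_ref); it is an illustration of the session/task-wait pattern, not Nova's vmops implementation.

    from oslo_vmware import api

    def destroy_vm(vm_ref, datacenter_ref, ds_path):
        # Placeholder vCenter endpoint and credentials.
        session = api.VMwareAPISession(
            'vc.example.org', 'administrator', 'secret',
            api_retry_count=3, task_poll_interval=0.5)
        # Power off, blocking on the returned vSphere task.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # Unregister the VM from the vCenter inventory (no task returned).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Delete the instance directory on the datastore, e.g.
        # '[datastore1] 220295bf-b021-4800-bc7e-a3dd311c747a'.
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task',
            session.vim.service_content.fileManager,
            name=ds_path, datacenter=datacenter_ref)
        session.wait_for_task(task)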
[ 1123.095064] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "514e0f15-f27d-4fab-9107-b92884075420" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.510s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.095301] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.151s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.095499] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "514e0f15-f27d-4fab-9107-b92884075420-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.095692] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.095854] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.097737] env[62974]: INFO nova.compute.manager [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Terminating instance [ 1123.158138] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1123.158369] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1123.158553] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleting the datastore file [datastore1] 
220295bf-b021-4800-bc7e-a3dd311c747a {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1123.158812] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71b31306-7b83-41fe-97aa-c01959111607 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.165425] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1123.165425] env[62974]: value = "task-2655113" [ 1123.165425] env[62974]: _type = "Task" [ 1123.165425] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.173304] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.213344] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.215933] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.476s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.216213] env[62974]: DEBUG nova.objects.instance [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'resources' on Instance uuid 72b0b643-7747-4dae-9d85-c8c6a573ce07 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.234350] env[62974]: INFO nova.scheduler.client.report [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted allocations for instance 11bd6a5d-9590-4aa3-aaf3-99d2ac394553 [ 1123.601922] env[62974]: DEBUG nova.compute.manager [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1123.602290] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1123.603298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd72d14-9749-4dac-a6f2-38acf5aa76ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.611720] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1123.611976] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93afbf0c-a12c-45d0-847c-a1550eac3c03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.617714] env[62974]: DEBUG oslo_vmware.api [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1123.617714] env[62974]: value = "task-2655114" [ 1123.617714] env[62974]: _type = "Task" [ 1123.617714] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.625847] env[62974]: DEBUG oslo_vmware.api [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655114, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.675261] env[62974]: DEBUG oslo_vmware.api [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154232} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.675971] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.675971] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.675971] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.676216] env[62974]: INFO nova.compute.manager [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Took 2.89 seconds to destroy the instance on the hypervisor. [ 1123.676266] env[62974]: DEBUG oslo.service.loopingcall [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1123.676492] env[62974]: DEBUG nova.compute.manager [-] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1123.676585] env[62974]: DEBUG nova.network.neutron [-] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1123.743026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c00bf3b7-a47b-4759-9747-69d02378503f tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.282s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.744027] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.202s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.744504] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa8ce511-dd03-4ef1-9839-94d605692bc8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.757122] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed73a704-0926-4148-9ab6-2263879a994e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.850103] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4084f7c-1f99-446a-b826-3387feea28e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.858178] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ec63b5-020a-4ac6-9b04-e4f9eca04d0d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.889071] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7919ad6-932e-4534-92e9-398cdedc8721 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.895806] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d91f59-a0f7-4c14-9498-be9e86fd3c47 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.908741] env[62974]: DEBUG nova.compute.provider_tree [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.128575] env[62974]: DEBUG oslo_vmware.api [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 
tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655114, 'name': PowerOffVM_Task, 'duration_secs': 0.183655} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.128877] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1124.128994] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1124.129305] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-247e1bcf-a9c5-4e1a-a144-5edb412657d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.136194] env[62974]: DEBUG nova.compute.manager [req-65bb7215-51ae-45dc-9cda-d2eed17624db req-7ce2821c-9e84-4794-8a9d-9f8bd4c59762 service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Received event network-vif-deleted-daa4e7d6-34e5-4455-b28f-6ee056ef2e93 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1124.136660] env[62974]: INFO nova.compute.manager [req-65bb7215-51ae-45dc-9cda-d2eed17624db req-7ce2821c-9e84-4794-8a9d-9f8bd4c59762 service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Neutron deleted interface daa4e7d6-34e5-4455-b28f-6ee056ef2e93; detaching it from the instance and deleting it from the info cache [ 1124.136852] env[62974]: DEBUG nova.network.neutron [req-65bb7215-51ae-45dc-9cda-d2eed17624db req-7ce2821c-9e84-4794-8a9d-9f8bd4c59762 service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.191454] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1124.191683] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1124.191855] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleting the datastore file [datastore2] 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1124.192139] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de633a1f-27ba-438f-883d-0da0e6f8f45d {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.213765] env[62974]: DEBUG oslo_vmware.api [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1124.213765] env[62974]: value = "task-2655117" [ 1124.213765] env[62974]: _type = "Task" [ 1124.213765] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.221846] env[62974]: DEBUG oslo_vmware.api [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655117, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.296247] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "11bd6a5d-9590-4aa3-aaf3-99d2ac394553" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.552s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.411357] env[62974]: DEBUG nova.scheduler.client.report [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.610816] env[62974]: DEBUG nova.network.neutron [-] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.639440] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9516e5ec-95dc-4bdd-ba9d-df6fcbff65b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.648959] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504619a9-275e-4b46-a4c3-8acb05233fa9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.676497] env[62974]: DEBUG nova.compute.manager [req-65bb7215-51ae-45dc-9cda-d2eed17624db req-7ce2821c-9e84-4794-8a9d-9f8bd4c59762 service nova] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Detach interface failed, port_id=daa4e7d6-34e5-4455-b28f-6ee056ef2e93, reason: Instance 220295bf-b021-4800-bc7e-a3dd311c747a could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1124.723257] env[62974]: DEBUG oslo_vmware.api [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140672} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.723519] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1124.723701] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1124.723872] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1124.724055] env[62974]: INFO nova.compute.manager [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1124.724298] env[62974]: DEBUG oslo.service.loopingcall [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1124.724495] env[62974]: DEBUG nova.compute.manager [-] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1124.724593] env[62974]: DEBUG nova.network.neutron [-] [instance: 514e0f15-f27d-4fab-9107-b92884075420] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1124.917293] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.922014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.860s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.922014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.922014] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1124.922014] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f946c5-7c97-4f20-8d0f-64d382527540 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.930411] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569d158b-3356-4225-b483-43920019db10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.944846] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02239896-75cb-4274-afcc-a7dc34c99b73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.947701] env[62974]: INFO nova.scheduler.client.report [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted allocations for instance 72b0b643-7747-4dae-9d85-c8c6a573ce07 [ 1124.958024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c6796e-556d-4f84-b5fb-30476199761b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.987941] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179408MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1124.987941] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.988129] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.116768] env[62974]: INFO nova.compute.manager [-] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Took 1.44 seconds to deallocate network for instance. [ 1125.456759] env[62974]: DEBUG oslo_concurrency.lockutils [None req-ee886650-4273-4404-afb5-bf1bb3176f8a tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.327s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.458154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.915s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.458282] env[62974]: INFO nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] During sync_power_state the instance has a pending task (deleting). Skip. [ 1125.458456] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "72b0b643-7747-4dae-9d85-c8c6a573ce07" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.643227] env[62974]: DEBUG nova.network.neutron [-] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.661879] env[62974]: INFO nova.compute.manager [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Took 0.55 seconds to detach 1 volumes for instance. [ 1126.014136] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance cf73422d-7f4b-4bae-9d69-de74d7211243 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1126.014308] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 4de11643-da0a-453f-b03e-ca19819f4f06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1126.014434] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance c90c9a6d-661f-4574-8a0d-7d8cacf8618d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1126.014555] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 2a962aab-3057-43df-97f7-b63ce808fb90 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1126.014672] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 220295bf-b021-4800-bc7e-a3dd311c747a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1126.014785] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 514e0f15-f27d-4fab-9107-b92884075420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1126.014970] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1126.015117] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1126.093416] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b4ae37-21ba-4196-b3ca-52f31b1b40e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.102274] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44931d07-8b1e-4da7-98b5-298e655ad506 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.131382] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b893b33a-edb3-4b23-a8a0-4732b9423d81 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.139779] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e03942d-669b-4bc0-9a50-fab327b7ee14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.146030] env[62974]: INFO nova.compute.manager [-] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Took 1.42 seconds to deallocate network for instance. 
[ 1126.154601] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.167922] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.172437] env[62974]: DEBUG nova.compute.manager [req-a4907bcf-f766-468e-bc85-6a4015dd57e0 req-7a12b331-7e3b-4a02-85b8-10796da8bba5 service nova] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Received event network-vif-deleted-9a104751-f775-4505-a6de-a82f22b2127c {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1126.660382] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.664008] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.165877] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1127.166122] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.178s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.166405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.999s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.166806] env[62974]: DEBUG nova.objects.instance [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'resources' on Instance uuid 220295bf-b021-4800-bc7e-a3dd311c747a {{(pid=62974) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.426170] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "cf73422d-7f4b-4bae-9d69-de74d7211243" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.426483] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.426718] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "cf73422d-7f4b-4bae-9d69-de74d7211243-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.426904] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.427087] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.429233] env[62974]: INFO nova.compute.manager [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Terminating instance [ 1127.738370] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.738698] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.763727] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f4c382-522d-4b2f-95e8-1476bd27c922 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.771807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91623f6a-e27f-41b9-b062-8911aca04ce6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.801780] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4a5e4a-983f-4e0a-8f33-582c6325d3d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.809427] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09286b1c-3b0b-419a-baff-2e927d4bd77d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.823317] env[62974]: DEBUG nova.compute.provider_tree [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.933012] env[62974]: DEBUG nova.compute.manager [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1127.933258] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1127.934137] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273de05c-3a82-45fd-bbf7-d9116cd9de4f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.943478] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.943711] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1aaf5ed8-623c-4cda-a9f6-3c4f7447f593 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.950103] env[62974]: DEBUG oslo_vmware.api [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1127.950103] env[62974]: value = "task-2655118" [ 1127.950103] env[62974]: _type = "Task" [ 1127.950103] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.958154] env[62974]: DEBUG oslo_vmware.api [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.241656] env[62974]: DEBUG nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1128.327028] env[62974]: DEBUG nova.scheduler.client.report [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.460018] env[62974]: DEBUG oslo_vmware.api [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655118, 'name': PowerOffVM_Task, 'duration_secs': 0.214222} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.460316] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1128.460485] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1128.460725] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d16da59a-d391-463c-a4cf-407f30d29327 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.530015] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1128.530246] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1128.530429] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleting the datastore file [datastore2] cf73422d-7f4b-4bae-9d69-de74d7211243 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1128.530692] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdff1571-2ac8-4858-8bd6-c6386c73c01f {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.537089] env[62974]: DEBUG oslo_vmware.api [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for the task: (returnval){ [ 1128.537089] env[62974]: value = "task-2655120" [ 1128.537089] env[62974]: _type = "Task" [ 1128.537089] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.545588] env[62974]: DEBUG oslo_vmware.api [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.764888] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.831723] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.833973] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.170s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.834230] env[62974]: DEBUG nova.objects.instance [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'resources' on Instance uuid 514e0f15-f27d-4fab-9107-b92884075420 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.849210] env[62974]: INFO nova.scheduler.client.report [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted allocations for instance 220295bf-b021-4800-bc7e-a3dd311c747a [ 1129.047984] env[62974]: DEBUG oslo_vmware.api [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Task: {'id': task-2655120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130464} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.048289] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1129.048568] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1129.048775] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1129.048975] env[62974]: INFO nova.compute.manager [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1129.049257] env[62974]: DEBUG oslo.service.loopingcall [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1129.049511] env[62974]: DEBUG nova.compute.manager [-] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1129.049612] env[62974]: DEBUG nova.network.neutron [-] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1129.303801] env[62974]: DEBUG nova.compute.manager [req-103d1f5f-2ee5-4977-880f-a52b31fffadb req-effeb83a-c9e0-4f38-89f2-f2d674032bf2 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Received event network-vif-deleted-7e4b21ba-e0f2-4104-8f46-57871fd6ed16 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1129.304023] env[62974]: INFO nova.compute.manager [req-103d1f5f-2ee5-4977-880f-a52b31fffadb req-effeb83a-c9e0-4f38-89f2-f2d674032bf2 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Neutron deleted interface 7e4b21ba-e0f2-4104-8f46-57871fd6ed16; detaching it from the instance and deleting it from the info cache [ 1129.304222] env[62974]: DEBUG nova.network.neutron [req-103d1f5f-2ee5-4977-880f-a52b31fffadb req-effeb83a-c9e0-4f38-89f2-f2d674032bf2 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.357412] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b3ae2fe4-4cc7-4dc8-b593-813a2435e94d tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "220295bf-b021-4800-bc7e-a3dd311c747a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.077s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.409927] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6d0c0a-f3c3-4936-81f2-ce45342f1de6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.417744] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede65795-b5e7-48d1-8735-a83c826d33bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.449806] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecca647e-5756-45c3-96b2-87350d696fab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.457393] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4797c6-81d9-43de-a690-8b89d728e633 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.470524] env[62974]: DEBUG nova.compute.provider_tree [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.788871] env[62974]: DEBUG 
nova.network.neutron [-] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.806906] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-997fa0ce-78b6-48d7-932d-a0136893f8cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.817054] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8bc134-094a-4150-924b-79b6a09c9725 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.842336] env[62974]: DEBUG nova.compute.manager [req-103d1f5f-2ee5-4977-880f-a52b31fffadb req-effeb83a-c9e0-4f38-89f2-f2d674032bf2 service nova] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Detach interface failed, port_id=7e4b21ba-e0f2-4104-8f46-57871fd6ed16, reason: Instance cf73422d-7f4b-4bae-9d69-de74d7211243 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1129.973869] env[62974]: DEBUG nova.scheduler.client.report [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.291358] env[62974]: INFO nova.compute.manager [-] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Took 1.24 seconds to deallocate network for instance. 
[ 1130.478910] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.645s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.481251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.716s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.483234] env[62974]: INFO nova.compute.claims [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.494461] env[62974]: INFO nova.scheduler.client.report [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted allocations for instance 514e0f15-f27d-4fab-9107-b92884075420 [ 1130.635762] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.635996] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.798094] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.000627] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2936411b-a1b3-4110-bd28-95e24dd3f27a tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "514e0f15-f27d-4fab-9107-b92884075420" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.905s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.139928] env[62974]: DEBUG nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Starting 
instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1131.571349] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcb7119-df1e-4c1f-93a8-18bd2b690820 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.578948] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783cd5a3-9279-4771-8972-f7bbc5854187 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.608463] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fef149-a055-46d9-a5a4-db6b3cf7e39c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.615689] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cfa4bd-c6d7-4612-b8a3-2414f5294e35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.628235] env[62974]: DEBUG nova.compute.provider_tree [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.657532] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.063215] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "744a685d-845e-4818-abb5-c70056fd4cd0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.063548] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.131057] env[62974]: DEBUG nova.scheduler.client.report [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.566023] env[62974]: DEBUG nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1132.636023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.155s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.636490] env[62974]: DEBUG nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1132.639108] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.841s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.639364] env[62974]: DEBUG nova.objects.instance [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lazy-loading 'resources' on Instance uuid cf73422d-7f4b-4bae-9d69-de74d7211243 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.088368] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.142284] env[62974]: DEBUG nova.compute.utils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1133.146659] env[62974]: DEBUG nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1133.146659] env[62974]: DEBUG nova.network.neutron [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1133.193139] env[62974]: DEBUG nova.policy [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1133.232134] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b355e45-4829-45c8-8ce8-cdbd3c6d6a0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.239664] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ce1e96-d8bf-4369-aa8d-7555af3976a4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.271035] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d221be9-8d39-4e96-887a-30353c9e1e88 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.277399] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8115e473-44f2-45ba-b8fe-1c2abcc5203e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.290389] env[62974]: DEBUG nova.compute.provider_tree [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.463041] env[62974]: DEBUG nova.network.neutron [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Successfully created port: 3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1133.646526] env[62974]: DEBUG nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1133.793851] env[62974]: DEBUG nova.scheduler.client.report [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.298726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.300999] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.643s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.302690] env[62974]: INFO nova.compute.claims [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1134.322958] env[62974]: INFO nova.scheduler.client.report [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Deleted allocations for instance cf73422d-7f4b-4bae-9d69-de74d7211243 [ 1134.656450] env[62974]: DEBUG nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1134.684237] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1134.684487] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1134.684647] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1134.684825] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1134.684972] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1134.685130] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1134.685338] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1134.685497] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1134.685663] 
env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1134.685824] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1134.685993] env[62974]: DEBUG nova.virt.hardware [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1134.686882] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e556a0-60a7-400f-856a-1e51fc979c97 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.694656] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e7e515-81ae-4a75-82bf-735fe9647f7e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.764112] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "2a962aab-3057-43df-97f7-b63ce808fb90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.764405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "2a962aab-3057-43df-97f7-b63ce808fb90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.764641] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "2a962aab-3057-43df-97f7-b63ce808fb90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.764832] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "2a962aab-3057-43df-97f7-b63ce808fb90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.765011] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 
tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "2a962aab-3057-43df-97f7-b63ce808fb90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.767051] env[62974]: INFO nova.compute.manager [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Terminating instance [ 1134.806297] env[62974]: DEBUG nova.compute.manager [req-de880797-c46e-472f-b000-9b2ef26744f7 req-b484e671-10ad-45bd-a346-ac15c6732f0f service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-vif-plugged-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1134.806525] env[62974]: DEBUG oslo_concurrency.lockutils [req-de880797-c46e-472f-b000-9b2ef26744f7 req-b484e671-10ad-45bd-a346-ac15c6732f0f service nova] Acquiring lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.806748] env[62974]: DEBUG oslo_concurrency.lockutils [req-de880797-c46e-472f-b000-9b2ef26744f7 req-b484e671-10ad-45bd-a346-ac15c6732f0f service nova] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.806872] env[62974]: DEBUG oslo_concurrency.lockutils [req-de880797-c46e-472f-b000-9b2ef26744f7 req-b484e671-10ad-45bd-a346-ac15c6732f0f service nova] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.807048] env[62974]: DEBUG nova.compute.manager [req-de880797-c46e-472f-b000-9b2ef26744f7 req-b484e671-10ad-45bd-a346-ac15c6732f0f service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] No waiting events found dispatching network-vif-plugged-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1134.807217] env[62974]: WARNING nova.compute.manager [req-de880797-c46e-472f-b000-9b2ef26744f7 req-b484e671-10ad-45bd-a346-ac15c6732f0f service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received unexpected event network-vif-plugged-3130f1da-8f58-4210-ac5f-966ca6592a53 for instance with vm_state building and task_state spawning. 
[ 1134.831310] env[62974]: DEBUG oslo_concurrency.lockutils [None req-11ce28bf-fc17-4f4a-800e-feeefe910e82 tempest-ServerActionsTestOtherA-1164862337 tempest-ServerActionsTestOtherA-1164862337-project-member] Lock "cf73422d-7f4b-4bae-9d69-de74d7211243" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.405s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.887707] env[62974]: DEBUG nova.network.neutron [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Successfully updated port: 3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1135.271457] env[62974]: DEBUG nova.compute.manager [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1135.271689] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1135.272656] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d3f1d8-2795-4bbf-aa61-2b7b280821c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.280530] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1135.280789] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2e1f80d-11f0-4419-81a6-aa1f09fdca5f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.287408] env[62974]: DEBUG oslo_vmware.api [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1135.287408] env[62974]: value = "task-2655121" [ 1135.287408] env[62974]: _type = "Task" [ 1135.287408] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.294669] env[62974]: DEBUG oslo_vmware.api [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655121, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.390609] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.390888] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.390939] env[62974]: DEBUG nova.network.neutron [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1135.412203] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5603e4e1-5b8b-48a5-8d57-3f9c18c40e29 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.423175] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42365e68-3380-480f-baef-ecb3fed34043 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.455439] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0fe170-b5a3-4cf7-888f-7bd568b0b649 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.463024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ae8c52-12dc-40ef-b322-f360a80a3f5f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.476351] env[62974]: DEBUG nova.compute.provider_tree [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.797042] env[62974]: DEBUG oslo_vmware.api [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655121, 'name': PowerOffVM_Task, 'duration_secs': 0.236551} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.797307] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1135.797470] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1135.797706] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1bdc8d27-e682-4e12-8ab6-67f87b5b9ce5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.882293] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1135.882419] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1135.882540] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleting the datastore file [datastore1] 2a962aab-3057-43df-97f7-b63ce808fb90 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1135.882828] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26abc328-b0cb-4eff-861d-7304bccc1e25 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.890128] env[62974]: DEBUG oslo_vmware.api [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1135.890128] env[62974]: value = "task-2655123" [ 1135.890128] env[62974]: _type = "Task" [ 1135.890128] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.900395] env[62974]: DEBUG oslo_vmware.api [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655123, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.969655] env[62974]: DEBUG nova.network.neutron [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1135.983918] env[62974]: DEBUG nova.scheduler.client.report [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.374008] env[62974]: DEBUG nova.network.neutron [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.400963] env[62974]: DEBUG oslo_vmware.api [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127367} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.401318] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.401629] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1136.401629] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1136.401805] env[62974]: INFO nova.compute.manager [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1136.402149] env[62974]: DEBUG oslo.service.loopingcall [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1136.402321] env[62974]: DEBUG nova.compute.manager [-] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1136.402438] env[62974]: DEBUG nova.network.neutron [-] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1136.492390] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.492726] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.492926] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.493154] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.496721] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.496721] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.496721] env[62974]: DEBUG nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 
521b463f-98f9-4365-b446-5de9af79f220] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1136.500973] env[62974]: INFO nova.compute.manager [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Terminating instance [ 1136.504243] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.416s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.505791] env[62974]: INFO nova.compute.claims [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1136.832538] env[62974]: DEBUG nova.compute.manager [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1136.832748] env[62974]: DEBUG nova.compute.manager [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing instance network info cache due to event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1136.832938] env[62974]: DEBUG oslo_concurrency.lockutils [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.876669] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.877014] env[62974]: DEBUG nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Instance network_info: |[{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1136.877333] env[62974]: DEBUG oslo_concurrency.lockutils [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.877516] env[62974]: DEBUG nova.network.neutron [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1136.878740] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:4c:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3130f1da-8f58-4210-ac5f-966ca6592a53', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1136.886984] env[62974]: DEBUG oslo.service.loopingcall [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1136.888620] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1136.889508] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74d622d5-21d2-4d54-9168-955dd04d78fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.911495] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1136.911495] env[62974]: value = "task-2655124" [ 1136.911495] env[62974]: _type = "Task" [ 1136.911495] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.921204] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655124, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.003069] env[62974]: DEBUG nova.compute.utils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1137.004495] env[62974]: DEBUG nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1137.004668] env[62974]: DEBUG nova.network.neutron [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.012838] env[62974]: DEBUG nova.compute.manager [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1137.013343] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1137.015458] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1bab5b-d1de-47df-bc0b-ba9f385fc531 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.023241] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.024124] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fa6716a-0d9d-4b25-b1a0-568234e4630b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.030276] env[62974]: DEBUG oslo_vmware.api [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1137.030276] env[62974]: value = "task-2655125" [ 1137.030276] env[62974]: _type = "Task" [ 1137.030276] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.042543] env[62974]: DEBUG oslo_vmware.api [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.069765] env[62974]: DEBUG nova.policy [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7337dc651b624b41a4dae92e0603c534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ecf0c1b56e34a6cbc2d073089e37efc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1137.383688] env[62974]: DEBUG nova.network.neutron [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Successfully created port: 6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1137.391870] env[62974]: DEBUG nova.network.neutron [-] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.422955] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655124, 'name': CreateVM_Task, 'duration_secs': 0.36718} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.423194] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1137.423883] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.424098] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.424417] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1137.425889] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-782fa8a8-8cda-49df-8039-417f8a49176e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.430672] env[62974]: DEBUG oslo_vmware.api [None 
req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1137.430672] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]523c7f99-2e78-009f-f523-239be9a20e8b" [ 1137.430672] env[62974]: _type = "Task" [ 1137.430672] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.439612] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523c7f99-2e78-009f-f523-239be9a20e8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.507752] env[62974]: DEBUG nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1137.543450] env[62974]: DEBUG oslo_vmware.api [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655125, 'name': PowerOffVM_Task, 'duration_secs': 0.22786} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.543717] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.543899] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1137.544165] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fb5d0a8-cbc0-463a-bcf9-079ddae50153 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.609332] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1137.609558] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1137.609931] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleting the datastore file [datastore2] c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1137.610044] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34017c0f-13d1-4048-962c-c6eb9c9a4a9d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.618211] env[62974]: DEBUG oslo_vmware.api [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for the task: (returnval){ [ 1137.618211] env[62974]: value = "task-2655127" [ 1137.618211] env[62974]: _type = "Task" [ 1137.618211] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.626590] env[62974]: DEBUG oslo_vmware.api [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.629569] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830a8463-f52a-4a42-81f3-5effda7b78f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.636318] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ace47a-afb1-4804-bdc1-4dfecc60bea0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.669776] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6143b913-d835-42be-be62-0c6cd9589077 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.677835] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819de2ed-d783-4abf-960b-2033fa9d3d78 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.692700] env[62974]: DEBUG nova.compute.provider_tree [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.742019] env[62974]: DEBUG nova.network.neutron [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updated VIF entry in instance network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1137.742407] env[62974]: DEBUG nova.network.neutron [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.896357] env[62974]: INFO nova.compute.manager [-] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Took 1.49 seconds to deallocate network for instance. [ 1137.940108] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]523c7f99-2e78-009f-f523-239be9a20e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.011099} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.940403] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.940630] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1137.940855] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.941027] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.941188] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.941538] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38df82a4-b586-4ad0-8a1c-aa1d1e403edc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.949970] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.950178] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1137.951331] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84478418-ca5b-459e-a6c1-1472a375333d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.956052] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1137.956052] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5278c91b-0c94-5218-5cf5-73d162e836d5" [ 1137.956052] env[62974]: _type = "Task" [ 1137.956052] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.963932] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5278c91b-0c94-5218-5cf5-73d162e836d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.127761] env[62974]: DEBUG oslo_vmware.api [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Task: {'id': task-2655127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193641} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.127761] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1138.128022] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1138.128197] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1138.128347] env[62974]: INFO nova.compute.manager [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1138.128636] env[62974]: DEBUG oslo.service.loopingcall [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.129021] env[62974]: DEBUG nova.compute.manager [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1138.129190] env[62974]: DEBUG nova.network.neutron [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1138.196094] env[62974]: DEBUG nova.scheduler.client.report [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.245419] env[62974]: DEBUG oslo_concurrency.lockutils [req-c591a425-1aa8-4e2d-8703-dcc14ec501d6 req-3307e389-ba20-4fab-b721-ddc43874aba5 service nova] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.403421] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.476025] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5278c91b-0c94-5218-5cf5-73d162e836d5, 'name': SearchDatastore_Task, 'duration_secs': 0.00902} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.476025] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a48473f0-0be2-4123-b1c5-a440f29e80c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.482544] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1138.482544] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c7cf47-7f05-bb4a-8b36-4764147b5873" [ 1138.482544] env[62974]: _type = "Task" [ 1138.482544] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.494068] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c7cf47-7f05-bb4a-8b36-4764147b5873, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.519351] env[62974]: DEBUG nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1138.548020] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1138.548020] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.548020] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.548020] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.548020] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.548020] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1138.548606] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1138.548924] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1138.549254] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1138.549586] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1138.550099] env[62974]: DEBUG nova.virt.hardware [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1138.550894] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be03ec7d-102e-4192-9ad8-b2fb2dc477b3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.562017] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a691ef7-add8-47b6-94f1-0cbbe7c49650 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.704020] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.197s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.704020] env[62974]: DEBUG nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1138.705300] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.302s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.705651] env[62974]: DEBUG nova.objects.instance [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'resources' on Instance uuid 2a962aab-3057-43df-97f7-b63ce808fb90 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.863306] env[62974]: DEBUG nova.compute.manager [req-fe7dbe1e-9c10-4c7c-ae55-072a6fbac920 req-0f9bc08f-59c2-4606-8593-a94d398e95a1 service nova] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Received event network-vif-deleted-5ea3e9de-b94a-4478-9c34-3ec161fff6e7 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1138.863306] env[62974]: DEBUG nova.compute.manager [req-fe7dbe1e-9c10-4c7c-ae55-072a6fbac920 req-0f9bc08f-59c2-4606-8593-a94d398e95a1 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Received event network-vif-deleted-c699c4dc-40cf-4eaa-9ba6-5e255a43e01a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1138.865346] env[62974]: INFO nova.compute.manager [req-fe7dbe1e-9c10-4c7c-ae55-072a6fbac920 req-0f9bc08f-59c2-4606-8593-a94d398e95a1 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Neutron deleted interface c699c4dc-40cf-4eaa-9ba6-5e255a43e01a; detaching it from the instance and deleting it from the info cache [ 1138.865576] env[62974]: DEBUG nova.network.neutron [req-fe7dbe1e-9c10-4c7c-ae55-072a6fbac920 req-0f9bc08f-59c2-4606-8593-a94d398e95a1 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.994454] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c7cf47-7f05-bb4a-8b36-4764147b5873, 'name': SearchDatastore_Task, 'duration_secs': 0.010406} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.994709] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.994954] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] d7ca15a3-edd2-48a2-9ee0-5d2072f1310a/d7ca15a3-edd2-48a2-9ee0-5d2072f1310a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1138.995259] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f5d8ead-139f-4dcf-9610-9cabec1a19c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.002494] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1139.002494] env[62974]: value = "task-2655128" [ 1139.002494] env[62974]: _type = "Task" [ 1139.002494] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.010036] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655128, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.102882] env[62974]: DEBUG nova.network.neutron [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Successfully updated port: 6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1139.209146] env[62974]: DEBUG nova.compute.utils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1139.215524] env[62974]: DEBUG nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1139.215904] env[62974]: DEBUG nova.network.neutron [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1139.270397] env[62974]: DEBUG nova.policy [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b837770f3f74a5fad99c7cc150e9cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '567f64e735384503b6c0172050bdfaf5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1139.325614] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8837b898-3823-43e3-a81f-83d7eea91339 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.330515] env[62974]: DEBUG nova.network.neutron [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.337328] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e744d5a2-4f44-4566-bf9a-213fe60d4a5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.375326] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-043bc8e0-cbf7-4745-bfd2-a34ed359a0f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.377966] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efef711-09b1-495b-ae5d-32e3fae45ef1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.386811] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdda4c12-cd77-4f16-8e25-b192bec17a17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.396975] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c3b621-dddf-46e2-a0a1-13077f58218e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.417055] env[62974]: DEBUG nova.compute.provider_tree [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.434361] env[62974]: DEBUG nova.compute.manager [req-fe7dbe1e-9c10-4c7c-ae55-072a6fbac920 
req-0f9bc08f-59c2-4606-8593-a94d398e95a1 service nova] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Detach interface failed, port_id=c699c4dc-40cf-4eaa-9ba6-5e255a43e01a, reason: Instance c90c9a6d-661f-4574-8a0d-7d8cacf8618d could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1139.512472] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450129} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.512741] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] d7ca15a3-edd2-48a2-9ee0-5d2072f1310a/d7ca15a3-edd2-48a2-9ee0-5d2072f1310a.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1139.513096] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1139.513194] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b7a880a-5717-4c3e-8cf6-14b15f2dc1f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.519354] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1139.519354] env[62974]: value = "task-2655129" [ 1139.519354] env[62974]: _type = "Task" [ 1139.519354] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.527300] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655129, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.606300] env[62974]: DEBUG nova.network.neutron [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Successfully created port: b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.609066] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.609381] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.609511] env[62974]: DEBUG nova.network.neutron [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1139.716141] env[62974]: DEBUG nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1139.834574] env[62974]: INFO nova.compute.manager [-] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Took 1.71 seconds to deallocate network for instance. [ 1139.920960] env[62974]: DEBUG nova.scheduler.client.report [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1140.031335] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066126} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.032426] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1140.032513] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7411436-677b-4274-8153-3f3adc5d8062 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.055044] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] d7ca15a3-edd2-48a2-9ee0-5d2072f1310a/d7ca15a3-edd2-48a2-9ee0-5d2072f1310a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.055366] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07ce5d6d-6e31-43df-8e1a-6322c8097a89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.075934] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1140.075934] env[62974]: value = "task-2655130" [ 1140.075934] env[62974]: _type = "Task" [ 1140.075934] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.084524] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655130, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.159869] env[62974]: DEBUG nova.network.neutron [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1140.316104] env[62974]: DEBUG nova.network.neutron [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.343861] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.427544] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.432856] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.089s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.432856] env[62974]: DEBUG nova.objects.instance [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lazy-loading 'resources' on Instance uuid c90c9a6d-661f-4574-8a0d-7d8cacf8618d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.464714] env[62974]: INFO nova.scheduler.client.report [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted allocations for 
instance 2a962aab-3057-43df-97f7-b63ce808fb90 [ 1140.588927] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655130, 'name': ReconfigVM_Task, 'duration_secs': 0.308027} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.589247] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Reconfigured VM instance instance-0000006d to attach disk [datastore1] d7ca15a3-edd2-48a2-9ee0-5d2072f1310a/d7ca15a3-edd2-48a2-9ee0-5d2072f1310a.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.589884] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f13d0e0-43f7-4cce-83a1-73e2033d9ea9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.596819] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1140.596819] env[62974]: value = "task-2655131" [ 1140.596819] env[62974]: _type = "Task" [ 1140.596819] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.607146] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655131, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.726824] env[62974]: DEBUG nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1140.749558] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1140.749839] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1140.749965] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1140.750157] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1140.750302] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1140.750447] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1140.750702] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1140.750812] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1140.750974] env[62974]: DEBUG nova.virt.hardware [None 
req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1140.751153] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1140.751326] env[62974]: DEBUG nova.virt.hardware [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1140.752231] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abecc18-1992-4c7e-ae3c-66660e1fbdfc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.760282] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da61759b-fa8f-4f7d-aaad-d22ab9aff6a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.818488] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.818836] env[62974]: DEBUG nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Instance network_info: |[{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1140.819293] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25552d-53e2-4657-964b-3bce9b77d06a 
tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:58:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7f41333-42ee-47f3-936c-d6701ab786d2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6214f8c1-1172-4dbd-b021-d468e0b04110', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1140.827177] env[62974]: DEBUG oslo.service.loopingcall [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1140.827308] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1140.827541] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c73a2ae1-c08f-4312-b412-caf912dabc03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.846955] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1140.846955] env[62974]: value = "task-2655132" [ 1140.846955] env[62974]: _type = "Task" [ 1140.846955] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.854722] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655132, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.892243] env[62974]: DEBUG nova.compute.manager [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-vif-plugged-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.892770] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.892770] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.892869] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.892974] env[62974]: DEBUG nova.compute.manager [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] No waiting events found dispatching network-vif-plugged-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1140.894385] env[62974]: WARNING nova.compute.manager [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received unexpected event network-vif-plugged-6214f8c1-1172-4dbd-b021-d468e0b04110 for instance with vm_state building and task_state spawning. [ 1140.894385] env[62974]: DEBUG nova.compute.manager [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.894385] env[62974]: DEBUG nova.compute.manager [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing instance network info cache due to event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1140.894565] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.895320] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.895320] env[62974]: DEBUG nova.network.neutron [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.975588] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d4bd5c89-71ca-4776-b728-16328425c830 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "2a962aab-3057-43df-97f7-b63ce808fb90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.211s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.055916] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c05ea8-6d38-42ab-89e5-626fad6cc201 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.068234] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ff61b9-027b-4ea6-bea8-e72fa9386e35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.115621] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10223765-25c0-41eb-b27c-101f331fd324 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.129094] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904d8663-b883-4bd5-a53d-c38e219e3050 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.134829] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655131, 'name': Rename_Task, 'duration_secs': 0.146452} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.135088] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1141.135894] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06238e6a-cbf7-44d1-ac7e-54af06f07732 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.146885] env[62974]: DEBUG nova.compute.provider_tree [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.153019] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1141.153019] env[62974]: value = "task-2655133" [ 1141.153019] env[62974]: _type = "Task" [ 1141.153019] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.163629] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.209323] env[62974]: DEBUG nova.network.neutron [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Successfully updated port: b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1141.356951] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655132, 'name': CreateVM_Task, 'duration_secs': 0.327831} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.357142] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1141.357817] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.357952] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.358292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1141.358570] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a4a03ec-2041-4921-9957-b57fcfba2eab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.363183] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1141.363183] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e99208-7106-70b3-fa08-39b9e65b03b6" [ 1141.363183] env[62974]: _type = "Task" [ 1141.363183] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.371839] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e99208-7106-70b3-fa08-39b9e65b03b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.614698] env[62974]: DEBUG nova.network.neutron [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updated VIF entry in instance network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1141.615128] env[62974]: DEBUG nova.network.neutron [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.650962] env[62974]: DEBUG nova.scheduler.client.report [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1141.663073] env[62974]: DEBUG oslo_vmware.api [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655133, 'name': PowerOnVM_Task, 'duration_secs': 0.463356} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.663360] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.663527] env[62974]: INFO nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Took 7.01 seconds to spawn the instance on the hypervisor. 
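[editor's note] The PowerOnVM_Task entries above (Invoking VirtualMachine.PowerOnVM_Task, "Waiting for the task", the 0% poll, then "completed successfully" and "Powered on the VM") trace the usual oslo.vmware invoke-then-wait round trip. A minimal sketch of that pattern is below; it is an illustration only, not Nova's actual vm_util code. `session.invoke_api()`, `session.vim` and `session.wait_for_task()` are real oslo.vmware session APIs, while the `power_on()` wrapper and the `vm_ref` argument are assumed names for this example.

    def power_on(session, vm_ref):
        # Ask vSphere to start the VM; returns a Task managed-object reference
        # (the "Invoking VirtualMachine.PowerOnVM_Task" line).
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Block while oslo.vmware polls the task (the "Waiting for the task"
        # and "progress is 0%" lines) until it reports success or raises.
        return session.wait_for_task(task)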
[ 1141.663707] env[62974]: DEBUG nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1141.664491] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d4d53a-bd58-4827-a8e8-c06579de770a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.712678] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.712831] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.712985] env[62974]: DEBUG nova.network.neutron [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.873840] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e99208-7106-70b3-fa08-39b9e65b03b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009307} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.874174] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.874409] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1141.874644] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.874792] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.874967] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1141.875477] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89b8b246-5ad4-46c9-bf27-4c1af637c7fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.883859] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1141.884045] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1141.884744] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a24a551-18c9-46db-9b94-2cba54348c0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.889550] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1141.889550] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a795ca-7202-5726-1518-d1421cb873c1" [ 1141.889550] env[62974]: _type = "Task" [ 1141.889550] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.897570] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a795ca-7202-5726-1518-d1421cb873c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.968548] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.968828] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.117709] env[62974]: DEBUG oslo_concurrency.lockutils [req-f3d1880a-f261-43c8-bca0-8a64c07fac50 req-a076735e-2706-4fd5-8cfa-ec914c6cdfae service nova] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.157879] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.179917] env[62974]: INFO nova.scheduler.client.report [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Deleted allocations for instance c90c9a6d-661f-4574-8a0d-7d8cacf8618d [ 1142.184378] env[62974]: INFO nova.compute.manager [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] 
Took 13.44 seconds to build instance. [ 1142.248099] env[62974]: DEBUG nova.network.neutron [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1142.366564] env[62974]: DEBUG nova.network.neutron [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.399352] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a795ca-7202-5726-1518-d1421cb873c1, 'name': SearchDatastore_Task, 'duration_secs': 0.008756} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.400126] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53ba9408-616a-4d8e-b51c-c4f605d7958b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.404917] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1142.404917] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fb01e4-3464-c9ba-ae24-e2abd197c3d6" [ 1142.404917] env[62974]: _type = "Task" [ 1142.404917] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.412454] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fb01e4-3464-c9ba-ae24-e2abd197c3d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.472102] env[62974]: INFO nova.compute.manager [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Detaching volume 991a4610-c157-453f-b11c-c96faca0b73a [ 1142.506551] env[62974]: INFO nova.virt.block_device [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Attempting to driver detach volume 991a4610-c157-453f-b11c-c96faca0b73a from mountpoint /dev/sdb [ 1142.506788] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1142.506975] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535487', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'name': 'volume-991a4610-c157-453f-b11c-c96faca0b73a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4de11643-da0a-453f-b03e-ca19819f4f06', 'attached_at': '', 'detached_at': '', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'serial': '991a4610-c157-453f-b11c-c96faca0b73a'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1142.507856] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2795f0d6-c0e3-49f1-a4fd-0edd16c515da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.529252] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866e7a91-0beb-4363-a8ac-d645e9c6e3ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.536203] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9246b690-eb13-47d7-b234-7601c97856ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.557271] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9f1c01-efd8-4fef-9f54-a3513f96ddfa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1142.572269] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] The volume has not been displaced from its original location: [datastore2] volume-991a4610-c157-453f-b11c-c96faca0b73a/volume-991a4610-c157-453f-b11c-c96faca0b73a.vmdk. No consolidation needed. {{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1142.577390] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Reconfiguring VM instance instance-00000063 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1142.577720] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-238953c0-1842-4d77-90b5-a3413bca6e45 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.596789] env[62974]: DEBUG oslo_vmware.api [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1142.596789] env[62974]: value = "task-2655137" [ 1142.596789] env[62974]: _type = "Task" [ 1142.596789] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.605097] env[62974]: DEBUG oslo_vmware.api [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655137, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.689024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fe2ad92e-4920-42af-8dad-46e337c7b064 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.950s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.689611] env[62974]: DEBUG oslo_concurrency.lockutils [None req-52de5043-d6ed-4edc-b34e-f25cde7c8daf tempest-AttachVolumeShelveTestJSON-1864537515 tempest-AttachVolumeShelveTestJSON-1864537515-project-member] Lock "c90c9a6d-661f-4574-8a0d-7d8cacf8618d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.197s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.869047] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.869404] env[62974]: DEBUG nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Instance network_info: |[{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1142.869848] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:df:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b715d91d-19dc-4ecd-9d75-e57c620d897a', 'vif_model': 'vmxnet3'}] 
{{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.878438] env[62974]: DEBUG oslo.service.loopingcall [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.878733] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1142.879050] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4690688c-7faf-42ba-8760-21ab3dad9524 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.911515] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1142.911515] env[62974]: value = "task-2655138" [ 1142.911515] env[62974]: _type = "Task" [ 1142.911515] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.920798] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52fb01e4-3464-c9ba-ae24-e2abd197c3d6, 'name': SearchDatastore_Task, 'duration_secs': 0.008874} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.921621] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.921676] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1142.921938] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-709f0694-411e-46b2-962c-ed5f1e8cfd7c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.927841] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655138, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.932637] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1142.932637] env[62974]: value = "task-2655139" [ 1142.932637] env[62974]: _type = "Task" [ 1142.932637] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.941633] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655139, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.963289] env[62974]: DEBUG nova.compute.manager [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Received event network-vif-plugged-b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1142.963525] env[62974]: DEBUG oslo_concurrency.lockutils [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] Acquiring lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.963765] env[62974]: DEBUG oslo_concurrency.lockutils [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] Lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.963951] env[62974]: DEBUG oslo_concurrency.lockutils [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] Lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.964558] env[62974]: DEBUG nova.compute.manager [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] No waiting events found dispatching network-vif-plugged-b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1142.964812] env[62974]: WARNING nova.compute.manager [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Received unexpected event network-vif-plugged-b715d91d-19dc-4ecd-9d75-e57c620d897a for instance with vm_state building and task_state spawning. 
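[editor's note] The "&lt;uuid&gt;-events" Acquiring/acquired/released lines above come from the compute manager popping an external instance event under a per-instance lock; when nothing is waiting for it, the "No waiting events found" and the unexpected-event WARNING follow. A rough sketch of that lock-and-pop shape is given below, assuming a plain dict of pending events; only `lockutils.lock()` is the real oslo.concurrency context-manager API, the rest is illustrative.

    from oslo_concurrency import lockutils

    def pop_instance_event(pending_events, instance_uuid, event_name):
        # Entering the context manager produces the "Acquiring lock" /
        # "acquired" log lines for the "<uuid>-events" lock.
        with lockutils.lock(instance_uuid + '-events'):
            # Returns the waiter if one was registered, else None, which
            # corresponds to the "No waiting events found dispatching ..." case.
            return pending_events.get(instance_uuid, {}).pop(event_name, None)
        # Leaving the context emits the "released ... held 0.000s" line.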
[ 1142.965015] env[62974]: DEBUG nova.compute.manager [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Received event network-changed-b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1142.965204] env[62974]: DEBUG nova.compute.manager [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Refreshing instance network info cache due to event network-changed-b715d91d-19dc-4ecd-9d75-e57c620d897a. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1142.965472] env[62974]: DEBUG oslo_concurrency.lockutils [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] Acquiring lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.965641] env[62974]: DEBUG oslo_concurrency.lockutils [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] Acquired lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.965880] env[62974]: DEBUG nova.network.neutron [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Refreshing network info cache for port b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1143.111623] env[62974]: DEBUG oslo_vmware.api [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655137, 'name': ReconfigVM_Task, 'duration_secs': 0.234209} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.111623] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Reconfigured VM instance instance-00000063 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1143.117567] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af10405a-bf56-44bf-ab15-2ab58e738dc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.147752] env[62974]: DEBUG oslo_vmware.api [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1143.147752] env[62974]: value = "task-2655140" [ 1143.147752] env[62974]: _type = "Task" [ 1143.147752] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.162349] env[62974]: DEBUG oslo_vmware.api [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655140, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.421250] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655138, 'name': CreateVM_Task, 'duration_secs': 0.343682} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.421551] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1143.422121] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.422283] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.422615] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1143.422867] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05b07793-8fe9-43a2-bd5c-c4ffb66a027f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.428422] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1143.428422] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5211b347-741d-5994-a362-8e50df88d7a1" [ 1143.428422] env[62974]: _type = "Task" [ 1143.428422] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.445896] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5211b347-741d-5994-a362-8e50df88d7a1, 'name': SearchDatastore_Task, 'duration_secs': 0.008985} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.449757] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.450147] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.450484] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.450758] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.451045] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.451419] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482778} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.451799] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed925ad4-ae23-491f-8055-dbb61a6a28fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.455299] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1143.455636] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1143.456090] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45d0251c-43d8-47fc-bfa7-a8dd4731ba2a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.463517] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1143.463517] env[62974]: value = "task-2655141" [ 1143.463517] env[62974]: _type = "Task" [ 1143.463517] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.466114] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.466114] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.468884] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5426a1e9-c51c-487e-8e21-6c61de8357cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.483847] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655141, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.486283] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1143.486283] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ad8819-428c-81de-0b3c-3886340e1c3b" [ 1143.486283] env[62974]: _type = "Task" [ 1143.486283] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.507035] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52ad8819-428c-81de-0b3c-3886340e1c3b, 'name': SearchDatastore_Task, 'duration_secs': 0.011043} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.507035] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e839a83-9331-4ed8-b64d-9c3fd77f6378 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.512563] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1143.512563] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524c5c6f-aa10-5af2-6c43-e71e0a3e3807" [ 1143.512563] env[62974]: _type = "Task" [ 1143.512563] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.528576] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524c5c6f-aa10-5af2-6c43-e71e0a3e3807, 'name': SearchDatastore_Task, 'duration_secs': 0.010523} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.528867] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.529252] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1143.534239] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f916b226-931c-4956-9286-0b4bb72e9268 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.546151] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1143.546151] env[62974]: value = "task-2655142" [ 1143.546151] env[62974]: _type = "Task" [ 1143.546151] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.558077] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655142, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.657662] env[62974]: DEBUG oslo_vmware.api [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655140, 'name': ReconfigVM_Task, 'duration_secs': 0.181431} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.657810] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535487', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'name': 'volume-991a4610-c157-453f-b11c-c96faca0b73a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4de11643-da0a-453f-b03e-ca19819f4f06', 'attached_at': '', 'detached_at': '', 'volume_id': '991a4610-c157-453f-b11c-c96faca0b73a', 'serial': '991a4610-c157-453f-b11c-c96faca0b73a'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1143.706656] env[62974]: DEBUG nova.network.neutron [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updated VIF entry in instance network info cache for port b715d91d-19dc-4ecd-9d75-e57c620d897a. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1143.707052] env[62974]: DEBUG nova.network.neutron [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.980908] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079052} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.980908] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1143.981389] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2effb986-debe-4a13-8e5d-e44b5e8dd78f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.010399] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.011125] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04250f47-deda-40f3-a614-faf6d3860c6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.040598] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1144.040598] env[62974]: value = "task-2655143" [ 1144.040598] env[62974]: _type = "Task" [ 1144.040598] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.054515] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655143, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.061720] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655142, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.204221] env[62974]: DEBUG nova.objects.instance [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'flavor' on Instance uuid 4de11643-da0a-453f-b03e-ca19819f4f06 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.210663] env[62974]: DEBUG oslo_concurrency.lockutils [req-8799b8dd-a9b5-4d92-8fd0-11f27937ce7f req-d3a73d81-f4f1-4b2e-b8b4-dd5fc383a42c service nova] Releasing lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.428995] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "007a5e28-7891-4327-ba39-bb9da8e32495" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.429254] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "007a5e28-7891-4327-ba39-bb9da8e32495" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.558021] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655143, 'name': ReconfigVM_Task, 'duration_secs': 0.3319} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.558021] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.558021] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92a510e9-bdfa-4aeb-b56c-7d95d51c30ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.562276] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655142, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.569726] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1144.569726] env[62974]: value = "task-2655145" [ 1144.569726] env[62974]: _type = "Task" [ 1144.569726] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.580840] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655145, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.933861] env[62974]: DEBUG nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1144.990122] env[62974]: DEBUG nova.compute.manager [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1144.990371] env[62974]: DEBUG nova.compute.manager [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing instance network info cache due to event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1144.990633] env[62974]: DEBUG oslo_concurrency.lockutils [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.990817] env[62974]: DEBUG oslo_concurrency.lockutils [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.992947] env[62974]: DEBUG nova.network.neutron [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.058751] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655142, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.45346} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.059633] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1145.059633] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1145.059835] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95087fae-a261-40ab-acb2-a01cf3da3b53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.069989] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1145.069989] env[62974]: value = "task-2655146" [ 1145.069989] env[62974]: _type = "Task" [ 1145.069989] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.085450] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.085747] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655145, 'name': Rename_Task, 'duration_secs': 0.353012} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.085996] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.088595] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72caa377-b140-425b-a677-fc791e387b77 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.095959] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1145.095959] env[62974]: value = "task-2655147" [ 1145.095959] env[62974]: _type = "Task" [ 1145.095959] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.112792] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655147, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.214106] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f096dbcf-f896-4470-a89c-8a20064d238b tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.244s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.462019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.462324] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.464183] env[62974]: INFO nova.compute.claims [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1145.583644] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14789} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.583644] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1145.584299] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014bca8c-49d8-42b9-ab5e-1708997b24ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.609936] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.613559] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38484077-2de2-453a-9f8f-f2a00659f3b2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.640393] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655147, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.641911] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1145.641911] env[62974]: value = "task-2655148" [ 1145.641911] env[62974]: _type = "Task" [ 1145.641911] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.656938] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655148, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.792387] env[62974]: DEBUG nova.network.neutron [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updated VIF entry in instance network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.793122] env[62974]: DEBUG nova.network.neutron [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.112763] env[62974]: DEBUG oslo_vmware.api [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655147, 'name': PowerOnVM_Task, 'duration_secs': 0.611289} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.113110] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1146.113277] env[62974]: INFO nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Took 7.60 seconds to spawn the instance on the hypervisor. 
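For reference, the recurring "Waiting for the task … progress is N% … completed successfully" entries above are produced by oslo.vmware's task polling (wait_for_task/_poll_task in oslo_vmware/api.py). The following is a minimal standalone sketch of that invoke-and-wait pattern, not Nova's own code: the vCenter host, credentials and managed-object value are placeholder assumptions, and the session argument names are assumptions based on the oslo.vmware API.

# Sketch of the oslo.vmware invoke-and-wait pattern behind the task log lines above.
# Host, credentials and the moref value are hypothetical placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc1.example.test',          # placeholder vCenter endpoint
    'administrator@vsphere',     # placeholder username
    'secret',                    # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,      # how often the task is polled, in seconds
)

# Reference an existing VM by managed-object id (the value is a placeholder).
vm_ref = vim_util.get_moref('vm-535487', 'VirtualMachine')

# Start an asynchronous vCenter task, then block while oslo.vmware polls it,
# logging progress lines like the ones in this log until it succeeds or fails.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)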
[ 1146.113458] env[62974]: DEBUG nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.114306] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3054a52c-6225-491f-a21f-cadaea3c3b8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.152543] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655148, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.295796] env[62974]: DEBUG oslo_concurrency.lockutils [req-4d9ab08e-d1bb-413a-89a4-6ca1c476c5c8 req-e4fde53e-e8dc-45e3-ae84-f9779b0645a1 service nova] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.296319] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.296542] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.296786] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "4de11643-da0a-453f-b03e-ca19819f4f06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.296980] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.297165] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.299460] env[62974]: INFO nova.compute.manager [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Terminating instance [ 1146.555629] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf3a99a-93f3-406c-aba0-573f002633ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.563589] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354389b8-4885-45cd-84bf-8c293f2f99ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.595853] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660d2c96-a38b-4c9e-8edd-1ed0b023459e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.603680] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f087a71-51f5-4abe-b848-67d203e483cc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.617410] env[62974]: DEBUG nova.compute.provider_tree [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.631365] env[62974]: INFO nova.compute.manager [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Took 14.99 seconds to build instance. [ 1146.654581] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655148, 'name': ReconfigVM_Task, 'duration_secs': 0.908356} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.654865] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.655496] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29e6f8d4-b6c7-45b3-9d7f-67bf75ae89cf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.662471] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1146.662471] env[62974]: value = "task-2655150" [ 1146.662471] env[62974]: _type = "Task" [ 1146.662471] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.671680] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655150, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.805448] env[62974]: DEBUG nova.compute.manager [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1146.805739] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1146.806665] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a757155-b4d1-4587-8d96-a9513f6bf931 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.815468] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.815785] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c423f21-14d4-4650-a863-7b3eb93d0b57 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.822950] env[62974]: DEBUG oslo_vmware.api [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1146.822950] env[62974]: value = "task-2655151" [ 1146.822950] env[62974]: _type = "Task" [ 1146.822950] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.831433] env[62974]: DEBUG oslo_vmware.api [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655151, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.120775] env[62974]: DEBUG nova.scheduler.client.report [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1147.133602] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3d25552d-53e2-4657-964b-3bce9b77d06a tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.497s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.174034] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655150, 'name': Rename_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.323266] env[62974]: DEBUG nova.compute.manager [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1147.323266] env[62974]: DEBUG nova.compute.manager [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing instance network info cache due to event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1147.323266] env[62974]: DEBUG oslo_concurrency.lockutils [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.323266] env[62974]: DEBUG oslo_concurrency.lockutils [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.323995] env[62974]: DEBUG nova.network.neutron [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1147.336681] env[62974]: DEBUG oslo_vmware.api [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655151, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.626370] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.164s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.627101] env[62974]: DEBUG nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1147.673773] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655150, 'name': Rename_Task, 'duration_secs': 0.987999} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.674126] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.674417] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-742e2226-9d12-495c-9b60-3c83836b5f35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.683865] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1147.683865] env[62974]: value = "task-2655152" [ 1147.683865] env[62974]: _type = "Task" [ 1147.683865] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.692158] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.839495] env[62974]: DEBUG oslo_vmware.api [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655151, 'name': PowerOffVM_Task, 'duration_secs': 0.932685} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.839681] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1147.839824] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1147.840309] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-824b2334-3b5b-40f1-a194-9eb6901c07fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.917555] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1147.917854] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1147.918126] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleting the datastore file [datastore2] 4de11643-da0a-453f-b03e-ca19819f4f06 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1147.918451] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81db1517-9730-4836-8a0f-eb6f17e69343 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.925808] env[62974]: DEBUG oslo_vmware.api [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1147.925808] env[62974]: value = "task-2655154" [ 1147.925808] env[62974]: _type = "Task" [ 1147.925808] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.934776] env[62974]: DEBUG oslo_vmware.api [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655154, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.071221] env[62974]: DEBUG nova.network.neutron [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updated VIF entry in instance network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1148.071716] env[62974]: DEBUG nova.network.neutron [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.133028] env[62974]: DEBUG nova.compute.utils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1148.134489] env[62974]: DEBUG nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1148.134643] env[62974]: DEBUG nova.network.neutron [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1148.181934] env[62974]: DEBUG nova.policy [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1148.195628] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655152, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.438011] env[62974]: DEBUG oslo_vmware.api [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154577} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.438011] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1148.438011] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1148.438011] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1148.438254] env[62974]: INFO nova.compute.manager [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Took 1.63 seconds to destroy the instance on the hypervisor. 
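The "Acquiring lock … / acquired … waited N.NNNs / released … held N.NNNs" DEBUG lines throughout this stretch (do_terminate_instance, _clear_events, compute_resources) come from oslo.concurrency's lock wrappers. A minimal sketch of the two usual forms that emit those messages follows; the decorated function and critical sections are hypothetical stand-ins, not Nova's actual methods, and the lock names are simply the ones seen above.

# Sketch of the oslo.concurrency locking pattern behind the lockutils DEBUG lines.
from oslo_concurrency import lockutils

# Decorator form: calls are serialized on the named lock, and the wrapper logs
# how long each caller waited for and then held the lock.
@lockutils.synchronized('4de11643-da0a-453f-b03e-ca19819f4f06')
def do_terminate_instance():
    pass  # hypothetical critical section

# Context-manager form, equivalent for ad-hoc critical sections such as the
# resource tracker's "compute_resources" lock.
with lockutils.lock('compute_resources'):
    pass  # hypothetical claim/usage update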
[ 1148.439031] env[62974]: DEBUG oslo.service.loopingcall [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1148.439031] env[62974]: DEBUG nova.compute.manager [-] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1148.439031] env[62974]: DEBUG nova.network.neutron [-] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1148.500803] env[62974]: DEBUG nova.network.neutron [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Successfully created port: 84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1148.574641] env[62974]: DEBUG oslo_concurrency.lockutils [req-dfb79f08-97b0-4cb4-a8f2-d79363e313fc req-5e87f6f8-b9ed-4eae-bdf6-e04ff8bf9d1c service nova] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.637397] env[62974]: DEBUG nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1148.695556] env[62974]: DEBUG oslo_vmware.api [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655152, 'name': PowerOnVM_Task, 'duration_secs': 0.529394} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.695834] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.696047] env[62974]: INFO nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Took 7.97 seconds to spawn the instance on the hypervisor. 
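The oslo.service.loopingcall entry above ("Waiting for function … _deallocate_network_with_retries to return") shows Nova retrying network deallocation under a looping call. As a rough illustration of that retry pattern, here is a sketch using FixedIntervalLoopingCall with a made-up retry function; Nova's code wraps its own helper and may use a different looping-call variant, so treat this only as the general shape.

# Rough sketch of the oslo.service looping-call retry pattern referenced above.
# The retry function is hypothetical; raising LoopingCallDone ends the loop and
# its retvalue becomes the result of .wait().
from oslo_service import loopingcall

attempts = {'count': 0}

def _retry_deallocate():
    attempts['count'] += 1
    if attempts['count'] >= 3:                      # pretend the third try succeeds
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_retry_deallocate)
result = timer.start(interval=2.0).wait()           # blocks until LoopingCallDone
print(result)                                       # -> True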
[ 1148.696231] env[62974]: DEBUG nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.696995] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5d0182-c8f9-47bb-9f2e-23a13a41dec3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.076374] env[62974]: DEBUG nova.compute.manager [req-a406f8ba-0b41-447c-ae49-8cfdc5853034 req-837ffc17-8bb6-4009-9549-840f3900a96a service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Received event network-vif-deleted-298eccf6-2f42-4f6e-99da-2695849a3163 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1149.076556] env[62974]: INFO nova.compute.manager [req-a406f8ba-0b41-447c-ae49-8cfdc5853034 req-837ffc17-8bb6-4009-9549-840f3900a96a service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Neutron deleted interface 298eccf6-2f42-4f6e-99da-2695849a3163; detaching it from the instance and deleting it from the info cache [ 1149.076833] env[62974]: DEBUG nova.network.neutron [req-a406f8ba-0b41-447c-ae49-8cfdc5853034 req-837ffc17-8bb6-4009-9549-840f3900a96a service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.216522] env[62974]: INFO nova.compute.manager [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Took 16.15 seconds to build instance. [ 1149.558389] env[62974]: DEBUG nova.network.neutron [-] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.579623] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3323175-15b9-4220-b10d-815ac89c933d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.592092] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28063ce6-094f-4c58-88df-f26496131542 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.622308] env[62974]: DEBUG nova.compute.manager [req-a406f8ba-0b41-447c-ae49-8cfdc5853034 req-837ffc17-8bb6-4009-9549-840f3900a96a service nova] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Detach interface failed, port_id=298eccf6-2f42-4f6e-99da-2695849a3163, reason: Instance 4de11643-da0a-453f-b03e-ca19819f4f06 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1149.647328] env[62974]: DEBUG nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1149.676343] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.676613] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.676771] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.676951] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.677110] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.677257] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.677460] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.677616] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.677833] 
env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.678040] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.678222] env[62974]: DEBUG nova.virt.hardware [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.679084] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4d3d96-815e-4662-a960-6a0b0664348a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.687725] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe56126-d5e0-4b9a-8f72-fe95264e0483 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.718778] env[62974]: DEBUG oslo_concurrency.lockutils [None req-64cac1e1-dff0-4db3-8430-45e4f75f4031 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.655s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.063966] env[62974]: INFO nova.compute.manager [-] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Took 1.62 seconds to deallocate network for instance. 
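The nova.virt.hardware entries above walk through CPU-topology selection for the m1.nano flavor: with no flavor or image limits set (logged as 0:0:0), the limits default to 65536 sockets/cores/threads, and for 1 vCPU the only layout is VirtCPUTopology(cores=1,sockets=1,threads=1). As a simplified standalone illustration of that enumeration (not Nova's nova.virt.hardware implementation), the sketch below lists every sockets×cores×threads layout that exactly consumes the vCPU count within the given limits.

# Simplified illustration of the "Build topologies for N vcpu(s)" enumeration above.
# This is not nova.virt.hardware; limits default to 65536 as in the log.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets*cores*threads layout that uses exactly `vcpus`."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)

print(list(possible_topologies(1)))   # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]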
[ 1150.141267] env[62974]: DEBUG nova.network.neutron [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Successfully updated port: 84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1150.572074] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.572439] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.572530] env[62974]: DEBUG nova.objects.instance [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'resources' on Instance uuid 4de11643-da0a-453f-b03e-ca19819f4f06 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.644723] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.644964] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.645435] env[62974]: DEBUG nova.network.neutron [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1151.112736] env[62974]: DEBUG nova.compute.manager [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Received event network-changed-b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1151.112939] env[62974]: DEBUG nova.compute.manager [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Refreshing instance network info cache due to event network-changed-b715d91d-19dc-4ecd-9d75-e57c620d897a. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1151.113209] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Acquiring lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.113354] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Acquired lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.113514] env[62974]: DEBUG nova.network.neutron [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Refreshing network info cache for port b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.162040] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e23ad8-78e0-4afa-9547-d0f72b32da4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.169773] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b323490-4163-4267-99ba-ca7c90595f02 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.204947] env[62974]: DEBUG nova.network.neutron [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1151.207359] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c937ea9-2ec1-49cf-a977-288f0dd705e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.216040] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136533bf-d366-4f2d-a730-4375baab3dea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.232722] env[62974]: DEBUG nova.compute.provider_tree [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.351650] env[62974]: DEBUG nova.network.neutron [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.738230] env[62974]: DEBUG nova.scheduler.client.report [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.815754] env[62974]: DEBUG nova.network.neutron [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 
744a685d-845e-4818-abb5-c70056fd4cd0] Updated VIF entry in instance network info cache for port b715d91d-19dc-4ecd-9d75-e57c620d897a. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.816213] env[62974]: DEBUG nova.network.neutron [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.854929] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.855282] env[62974]: DEBUG nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Instance network_info: |[{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1151.855717] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:d8:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84fb0bcd-a98e-4006-bc29-19f86ad7822d', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1151.863465] env[62974]: DEBUG oslo.service.loopingcall [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1151.863946] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1151.864202] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-804cbc36-ca20-44e8-9d80-9eb163423464 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.884507] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1151.884507] env[62974]: value = "task-2655157" [ 1151.884507] env[62974]: _type = "Task" [ 1151.884507] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.895381] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655157, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.243954] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.262776] env[62974]: INFO nova.scheduler.client.report [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted allocations for instance 4de11643-da0a-453f-b03e-ca19819f4f06 [ 1152.318651] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Releasing lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.319079] env[62974]: DEBUG nova.compute.manager [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-vif-plugged-84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1152.319309] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Acquiring lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.319539] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.319730] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.319914] env[62974]: DEBUG nova.compute.manager [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] No waiting events found dispatching network-vif-plugged-84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1152.320094] env[62974]: WARNING nova.compute.manager [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received unexpected event network-vif-plugged-84fb0bcd-a98e-4006-bc29-19f86ad7822d for instance with vm_state building and task_state spawning. 
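Annotation: the Folder.CreateVM_Task invocation and the task-2655157 polling above ("Waiting for the task ... to complete", "progress is 0%", then "completed successfully") follow oslo.vmware's invoke-and-wait pattern. A hedged sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession; the folder, config-spec and resource-pool arguments are placeholders supplied by the caller, and this is not Nova's vm_util code:

from oslo_vmware import api

def create_vm_and_wait(session: api.VMwareAPISession,
                       vm_folder_ref, config_spec, res_pool_ref):
    # invoke_api() issues the SOAP call that appears in the log as
    # "Invoking Folder.CreateVM_Task with opID=..." and returns a Task reference.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                  config=config_spec, pool=res_pool_ref)
    # wait_for_task() polls the task (the "progress is 0%" lines) and returns
    # the task info once vCenter reports success, raising on failure.
    return session.wait_for_task(task_ref)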
[ 1152.320262] env[62974]: DEBUG nova.compute.manager [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1152.320417] env[62974]: DEBUG nova.compute.manager [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing instance network info cache due to event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1152.320601] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.320738] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.320906] env[62974]: DEBUG nova.network.neutron [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1152.396657] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655157, 'name': CreateVM_Task, 'duration_secs': 0.364063} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.396873] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1152.404503] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.404675] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.404994] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1152.405267] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e64a145-6a15-46cf-89cb-dbc7ae6d044a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.409949] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1152.409949] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52068e3b-be7b-f4e9-2b07-46b14860909a" [ 1152.409949] env[62974]: _type = "Task" [ 1152.409949] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.418029] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52068e3b-be7b-f4e9-2b07-46b14860909a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.771017] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1d960f26-a71a-475c-bca3-06b3293a8c42 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "4de11643-da0a-453f-b03e-ca19819f4f06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.474s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.920449] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52068e3b-be7b-f4e9-2b07-46b14860909a, 'name': SearchDatastore_Task, 'duration_secs': 0.010141} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.921110] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.921110] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.921282] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.921349] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.921531] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.923771] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2167e761-70c4-487e-b67e-5fef94a1e950 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.933716] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 
tempest-AttachInterfacesTestJSON-1102515726-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.933891] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.934645] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd61e55c-5086-4ffb-af77-4b3a361dd808 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.939930] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1152.939930] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb865c-e38e-5415-e4bf-1d69891b0515" [ 1152.939930] env[62974]: _type = "Task" [ 1152.939930] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.948666] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb865c-e38e-5415-e4bf-1d69891b0515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.039274] env[62974]: DEBUG nova.network.neutron [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updated VIF entry in instance network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1153.039659] env[62974]: DEBUG nova.network.neutron [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.451250] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52eb865c-e38e-5415-e4bf-1d69891b0515, 'name': SearchDatastore_Task, 'duration_secs': 0.010403} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.452095] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b39275f-b122-4650-bcdb-fe4e71a0b967 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.457480] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1153.457480] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5260b15b-40e9-be4d-7a49-185ea9ca2363" [ 1153.457480] env[62974]: _type = "Task" [ 1153.457480] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.466476] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5260b15b-40e9-be4d-7a49-185ea9ca2363, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.542349] env[62974]: DEBUG oslo_concurrency.lockutils [req-500f86fc-1a27-416c-a0fd-d3459bd8b8cd req-2b3a7dee-3b9b-4d66-a89d-c8ee1b760cdf service nova] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.968717] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5260b15b-40e9-be4d-7a49-185ea9ca2363, 'name': SearchDatastore_Task, 'duration_secs': 0.010586} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.969050] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.969281] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 007a5e28-7891-4327-ba39-bb9da8e32495/007a5e28-7891-4327-ba39-bb9da8e32495.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.969547] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffff533f-81a4-45cc-889b-a9c2f4bb2667 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.977940] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1153.977940] env[62974]: value = "task-2655159" [ 1153.977940] env[62974]: _type = "Task" [ 1153.977940] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.986229] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655159, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.430386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "255a1d01-e007-45e5-a2c9-798223f41b30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.430626] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.489526] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473139} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.489836] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 007a5e28-7891-4327-ba39-bb9da8e32495/007a5e28-7891-4327-ba39-bb9da8e32495.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1154.490091] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1154.490368] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c65b4b4e-38d0-4579-a24d-9cf7be719aff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.497145] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1154.497145] env[62974]: value = "task-2655161" [ 1154.497145] env[62974]: _type = "Task" [ 1154.497145] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.505779] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655161, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.932766] env[62974]: DEBUG nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1155.008776] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655161, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066215} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.009140] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1155.009755] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868e34ea-e5cf-4152-8f2b-33f5807afc15 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.031504] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 007a5e28-7891-4327-ba39-bb9da8e32495/007a5e28-7891-4327-ba39-bb9da8e32495.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.031794] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19cc785c-4a28-48d0-8a68-2d3de6debd38 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.052382] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1155.052382] env[62974]: value = "task-2655162" [ 1155.052382] env[62974]: _type = "Task" [ 1155.052382] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.060767] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655162, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.161242] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.161454] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.161667] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1155.454315] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.454574] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.456095] env[62974]: INFO nova.compute.claims [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.563222] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655162, 'name': ReconfigVM_Task, 'duration_secs': 0.275977} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.563512] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 007a5e28-7891-4327-ba39-bb9da8e32495/007a5e28-7891-4327-ba39-bb9da8e32495.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.564141] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-496a8e2b-5113-4b00-8c76-17972ac528a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.570928] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1155.570928] env[62974]: value = "task-2655163" [ 1155.570928] env[62974]: _type = "Task" [ 1155.570928] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.579129] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655163, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.082917] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655163, 'name': Rename_Task, 'duration_secs': 0.145931} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.083244] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1156.083444] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2e16aa7-54cd-4b67-a5da-c4c67afe3599 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.090623] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1156.090623] env[62974]: value = "task-2655164" [ 1156.090623] env[62974]: _type = "Task" [ 1156.090623] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.098704] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655164, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.550988] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccdb1d7-e05d-4896-a4ac-d44ea8dcb915 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.560750] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600d076d-17e4-4a3b-ba3c-99a53921de80 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.599305] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a5b287-3c35-4375-b077-4f89cfc2f2a2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.607438] env[62974]: DEBUG oslo_vmware.api [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655164, 'name': PowerOnVM_Task, 'duration_secs': 0.491731} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.609539] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1156.609746] env[62974]: INFO nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Took 6.96 seconds to spawn the instance on the hypervisor. 
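Annotation: the spawn just logged for instance 007a5e28-7891-4327-ba39-bb9da8e32495 chains several vCenter tasks: CreateVM_Task, CopyVirtualDisk_Task from the devstack-image-cache_base image, ExtendVirtualDisk_Task on the copied root disk, ReconfigVM_Task to attach it, then Rename_Task and PowerOnVM_Task. A simplified sketch of just the disk-preparation steps, again assuming an established oslo_vmware.api.VMwareAPISession; the datastore paths and datacenter reference are placeholders, and this is not Nova's vm_util/ds_util code:

def prepare_root_disk(session, dc_ref, cached_vmdk, instance_vmdk,
                      new_capacity_kb=1048576):
    disk_mgr = session.vim.service_content.virtualDiskManager

    # CopyVirtualDisk_Task: copy the cached image VMDK into the instance folder,
    # e.g. "[datastore2] devstack-image-cache_base/<image>.vmdk" ->
    #      "[datastore2] <instance-uuid>/<instance-uuid>.vmdk".
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                                   destName=instance_vmdk)
    session.wait_for_task(copy_task)

    # ExtendVirtualDisk_Task: grow the copied root disk (1048576 KB = 1 GiB,
    # matching the "Extending root virtual disk to 1048576" entry above).
    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                     name=instance_vmdk, datacenter=dc_ref,
                                     newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(extend_task)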
[ 1156.609924] env[62974]: DEBUG nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.610710] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9d785e-7cd2-499c-8548-7ca6cf489ca3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.614024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c96e2fd-46dd-4b76-993d-2ceff42118d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.632444] env[62974]: DEBUG nova.compute.provider_tree [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.139941] env[62974]: DEBUG nova.scheduler.client.report [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.144469] env[62974]: INFO nova.compute.manager [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Took 11.71 seconds to build instance. [ 1157.646080] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.646616] env[62974]: DEBUG nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1157.649448] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c3b525dc-e7be-42b0-8984-7ec861a83756 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "007a5e28-7891-4327-ba39-bb9da8e32495" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.220s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.803200] env[62974]: DEBUG nova.compute.manager [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1157.803200] env[62974]: DEBUG nova.compute.manager [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing instance network info cache due to event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1157.803200] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1157.803524] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.803830] env[62974]: DEBUG nova.network.neutron [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1158.151448] env[62974]: DEBUG nova.compute.utils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1158.156021] env[62974]: DEBUG nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1158.156021] env[62974]: DEBUG nova.network.neutron [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1158.200691] env[62974]: DEBUG nova.policy [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c69e5ea97264d57978ddcb94ef4bc41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43dc876c8a2346c7bca249407fb7fed8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1158.623910] env[62974]: DEBUG nova.network.neutron [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updated VIF entry in instance network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1158.624470] env[62974]: DEBUG nova.network.neutron [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.648995] env[62974]: DEBUG nova.network.neutron [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Successfully created port: c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1158.658557] env[62974]: DEBUG nova.compute.manager 
[None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1158.683867] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Didn't find any instances for network info cache update. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1158.683867] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.684040] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.684227] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.684381] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.684525] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.684678] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.684806] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1158.684949] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.128231] env[62974]: DEBUG oslo_concurrency.lockutils [req-b1b3f166-111c-4418-b12e-7f2f1533b4ea req-952d2be9-7745-47bb-8efe-c17c4f389d95 service nova] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.191065] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.191539] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.191727] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.191992] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1159.193295] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68dc495-de27-4a3d-ac7b-73650b173422 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.207156] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0370de9f-cace-4942-8970-6a4d89520ec9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.229077] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12ceda9-9dc6-4d3c-b213-864f25488b69 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.239980] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18fd528-0643-4960-816e-aaff05ef0590 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.273361] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179408MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 
1159.273529] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.273721] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.668994] env[62974]: DEBUG nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1159.696653] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.696965] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.697122] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.697314] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.697485] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.697641] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 
tempest-AttachVolumeNegativeTest-604653782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.697851] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.698038] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.698212] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.698428] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.698566] env[62974]: DEBUG nova.virt.hardware [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.699506] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9944fb-dad1-4fcf-b748-d7bcec115306 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.708488] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4913dbeb-133e-487b-87a2-3dc49e0945bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.832601] env[62974]: DEBUG nova.compute.manager [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1159.832869] env[62974]: DEBUG nova.compute.manager [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing instance network info cache due to event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1159.833281] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.833405] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.833557] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1160.051161] env[62974]: DEBUG nova.compute.manager [req-023abdb1-8101-4a92-9fe1-713db3a2e557 req-8eb43af4-e7ef-4c90-b970-a9485f9b8c43 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Received event network-vif-plugged-c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1160.051161] env[62974]: DEBUG oslo_concurrency.lockutils [req-023abdb1-8101-4a92-9fe1-713db3a2e557 req-8eb43af4-e7ef-4c90-b970-a9485f9b8c43 service nova] Acquiring lock "255a1d01-e007-45e5-a2c9-798223f41b30-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.051161] env[62974]: DEBUG oslo_concurrency.lockutils [req-023abdb1-8101-4a92-9fe1-713db3a2e557 req-8eb43af4-e7ef-4c90-b970-a9485f9b8c43 service nova] Lock "255a1d01-e007-45e5-a2c9-798223f41b30-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.051585] env[62974]: DEBUG oslo_concurrency.lockutils [req-023abdb1-8101-4a92-9fe1-713db3a2e557 req-8eb43af4-e7ef-4c90-b970-a9485f9b8c43 service nova] Lock "255a1d01-e007-45e5-a2c9-798223f41b30-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.051942] env[62974]: DEBUG nova.compute.manager [req-023abdb1-8101-4a92-9fe1-713db3a2e557 req-8eb43af4-e7ef-4c90-b970-a9485f9b8c43 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] No waiting events found dispatching network-vif-plugged-c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1160.052288] env[62974]: WARNING nova.compute.manager [req-023abdb1-8101-4a92-9fe1-713db3a2e557 req-8eb43af4-e7ef-4c90-b970-a9485f9b8c43 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Received unexpected event network-vif-plugged-c013e4f0-4d15-4230-bcb4-15cbadf79757 for instance with vm_state building and task_state spawning. 
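The lock acquire/release pairs in the records above (the per-instance "255a1d01-e007-45e5-a2c9-798223f41b30-events" lock held around pop_instance_event, and the "compute_resources" lock held by the resource tracker) appear to be the two standard oslo.concurrency lockutils paths. The following is a minimal illustrative sketch of those two forms only, with names copied from the log for illustration; it is not Nova's actual code.

from oslo_concurrency import lockutils

# Decorator form: lockutils logs 'Acquiring lock "..." by "..."',
# 'Lock "..." acquired by "..." :: waited ...s' and
# 'Lock "..." "released" by "..." :: held ...s' around the wrapped call.
@lockutils.synchronized('255a1d01-e007-45e5-a2c9-798223f41b30-events')
def _pop_event(events, name):
    # Pop any waiter registered for an external event while the
    # per-instance event lock is held; returns None if nobody is waiting.
    return events.pop(name, None)

# Context-manager form: the 'Acquiring lock' / 'Acquired lock' /
# 'Releasing lock' lines around the refresh_cache-* locks come from
# this path.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache here

Serializing event dispatch on a per-instance lock is what lets the manager notice, as in the WARNING above, that a network-vif-plugged event arrived before any waiter was registered for an instance still in vm_state building / task_state spawning, instead of silently racing with the spawn path.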
[ 1160.137378] env[62974]: DEBUG nova.network.neutron [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Successfully updated port: c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1160.300073] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance d7ca15a3-edd2-48a2-9ee0-5d2072f1310a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.300073] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 521b463f-98f9-4365-b446-5de9af79f220 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.300073] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 744a685d-845e-4818-abb5-c70056fd4cd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.300433] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 007a5e28-7891-4327-ba39-bb9da8e32495 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.300433] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 255a1d01-e007-45e5-a2c9-798223f41b30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.523256] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updated VIF entry in instance network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.523659] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.569125] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquiring lock "a94cb966-5304-4484-8639-899d7211e8b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.569343] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "a94cb966-5304-4484-8639-899d7211e8b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.639449] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.639588] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.639810] env[62974]: DEBUG nova.network.neutron [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 
255a1d01-e007-45e5-a2c9-798223f41b30] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1160.802524] env[62974]: INFO nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance a94cb966-5304-4484-8639-899d7211e8b6 has allocations against this compute host but is not found in the database. [ 1160.802802] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1160.802987] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1160.878777] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28217753-3eef-4333-ae5b-88afce260ae8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.887073] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f606769f-5ddc-4252-bd90-fe2eb1f147e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.919611] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c88bfe-0649-45b0-bf40-5edf0e5833da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.928098] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d62fa9-9480-41b4-b863-9189e2b54e67 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.943316] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.026718] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.028105] env[62974]: DEBUG nova.compute.manager [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1161.028105] env[62974]: DEBUG nova.compute.manager [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing instance network info cache due to event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1161.028105] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.028105] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.028105] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.071240] env[62974]: DEBUG nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1161.177898] env[62974]: DEBUG nova.network.neutron [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1161.314340] env[62974]: DEBUG nova.network.neutron [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Updating instance_info_cache with network_info: [{"id": "c013e4f0-4d15-4230-bcb4-15cbadf79757", "address": "fa:16:3e:81:48:5d", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc013e4f0-4d", "ovs_interfaceid": "c013e4f0-4d15-4230-bcb4-15cbadf79757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.447029] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory 
has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1161.595209] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.729920] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updated VIF entry in instance network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1161.730393] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.816965] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.817393] env[62974]: DEBUG nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Instance network_info: |[{"id": 
"c013e4f0-4d15-4230-bcb4-15cbadf79757", "address": "fa:16:3e:81:48:5d", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc013e4f0-4d", "ovs_interfaceid": "c013e4f0-4d15-4230-bcb4-15cbadf79757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1161.817917] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:48:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c013e4f0-4d15-4230-bcb4-15cbadf79757', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1161.829483] env[62974]: DEBUG oslo.service.loopingcall [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1161.829779] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1161.830102] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72e4da26-afc4-44eb-88c5-6f7e9474bb9a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.859413] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1161.859413] env[62974]: value = "task-2655166" [ 1161.859413] env[62974]: _type = "Task" [ 1161.859413] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.872418] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655166, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.952617] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1161.952845] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.679s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.953081] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.358s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.954681] env[62974]: INFO nova.compute.claims [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1162.078771] env[62974]: DEBUG nova.compute.manager [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Received event network-changed-c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1162.078973] env[62974]: DEBUG nova.compute.manager [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Refreshing instance network info cache due to event network-changed-c013e4f0-4d15-4230-bcb4-15cbadf79757. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1162.079348] env[62974]: DEBUG oslo_concurrency.lockutils [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] Acquiring lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1162.079564] env[62974]: DEBUG oslo_concurrency.lockutils [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] Acquired lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.079804] env[62974]: DEBUG nova.network.neutron [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Refreshing network info cache for port c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.234033] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1162.234033] env[62974]: DEBUG nova.compute.manager [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1162.234033] env[62974]: DEBUG nova.compute.manager [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing instance network info cache due to event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1162.234033] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1162.234033] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.234352] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.370393] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655166, 'name': CreateVM_Task, 'duration_secs': 0.343968} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.370751] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1162.371382] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1162.371499] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.371866] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1162.372434] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a545885d-f47e-4db9-9315-1cc3f2fbb7d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.377303] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1162.377303] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c8b89d-3802-a62d-1f28-7bcd0016e61e" [ 1162.377303] env[62974]: _type = "Task" [ 1162.377303] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.384987] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c8b89d-3802-a62d-1f28-7bcd0016e61e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.775352] env[62974]: DEBUG nova.network.neutron [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Updated VIF entry in instance network info cache for port c013e4f0-4d15-4230-bcb4-15cbadf79757. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.775764] env[62974]: DEBUG nova.network.neutron [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Updating instance_info_cache with network_info: [{"id": "c013e4f0-4d15-4230-bcb4-15cbadf79757", "address": "fa:16:3e:81:48:5d", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc013e4f0-4d", "ovs_interfaceid": "c013e4f0-4d15-4230-bcb4-15cbadf79757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.892482] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c8b89d-3802-a62d-1f28-7bcd0016e61e, 'name': SearchDatastore_Task, 'duration_secs': 0.011488} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.892895] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1162.893335] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1162.893539] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1162.893757] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.893995] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1162.894359] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a1e4462-00c6-402c-b12f-c6df151c0be2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.904861] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1162.905130] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1162.906164] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-822361fd-695f-4cd1-8e96-28d5096b8efa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.912975] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1162.912975] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f9023b-13b7-c0c9-4d0d-f32569cd3901" [ 1162.912975] env[62974]: _type = "Task" [ 1162.912975] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.928384] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f9023b-13b7-c0c9-4d0d-f32569cd3901, 'name': SearchDatastore_Task, 'duration_secs': 0.009648} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.929487] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e98b7970-e640-42ad-803c-8dc47e2faf2c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.936737] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1162.936737] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]524f5117-6e6f-659f-b134-f37fb22bf2e4" [ 1162.936737] env[62974]: _type = "Task" [ 1162.936737] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.946213] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524f5117-6e6f-659f-b134-f37fb22bf2e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.952854] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updated VIF entry in instance network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.953238] env[62974]: DEBUG nova.network.neutron [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.038166] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8781597-a3a1-4d12-aae9-deda28cb1d96 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.046111] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92002728-0dd3-4d1e-bff7-1e4f6ae6d9d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.077434] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4e8ec2-fd68-4f80-8470-2fddbd957a3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.084474] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489507a1-be4d-4585-ab3e-a70d2ccecced {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.097540] env[62974]: DEBUG nova.compute.provider_tree [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.278589] env[62974]: DEBUG oslo_concurrency.lockutils [req-f21548a4-f7f1-4c9c-8069-d0a87706fc1b req-933aeb67-76d0-45ac-98a7-2dbb7830f84f service nova] Releasing lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.447892] env[62974]: DEBUG oslo_vmware.api [None 
req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]524f5117-6e6f-659f-b134-f37fb22bf2e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010593} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.448189] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.448404] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 255a1d01-e007-45e5-a2c9-798223f41b30/255a1d01-e007-45e5-a2c9-798223f41b30.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1163.448649] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdfcfffd-6829-41c3-a6a5-5a13683f3e48 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.456425] env[62974]: DEBUG oslo_concurrency.lockutils [req-f02fdad8-19aa-48d0-8f60-00f10ca3e379 req-ca9de4bf-28a6-4fba-99b1-75be17963df7 service nova] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.456844] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1163.456844] env[62974]: value = "task-2655167" [ 1163.456844] env[62974]: _type = "Task" [ 1163.456844] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.464578] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655167, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.600284] env[62974]: DEBUG nova.scheduler.client.report [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.967328] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655167, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.105537] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.152s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.105947] env[62974]: DEBUG nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1164.468336] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556996} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.468692] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 255a1d01-e007-45e5-a2c9-798223f41b30/255a1d01-e007-45e5-a2c9-798223f41b30.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1164.468790] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1164.469034] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5df2243-bcca-4682-9327-51878129b048 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.476795] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1164.476795] env[62974]: value = "task-2655168" [ 1164.476795] env[62974]: _type = "Task" [ 1164.476795] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.486451] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.612106] env[62974]: DEBUG nova.compute.utils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1164.613567] env[62974]: DEBUG nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1164.613866] env[62974]: DEBUG nova.network.neutron [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.652449] env[62974]: DEBUG nova.policy [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cec2a1d8e95e477982a6f03cbf2d97f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a64d9b15664a85adf28d95c8cbbe35', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.924534] env[62974]: DEBUG nova.network.neutron [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Successfully created port: b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1164.987395] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078882} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.987746] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1164.988583] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6252dd-2246-46cf-bb9d-f9054a8e2cad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.011175] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 255a1d01-e007-45e5-a2c9-798223f41b30/255a1d01-e007-45e5-a2c9-798223f41b30.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1165.011417] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a38c606-632d-4cfd-89df-313b2f800835 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.032159] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1165.032159] env[62974]: value = "task-2655169" [ 1165.032159] env[62974]: _type = "Task" [ 1165.032159] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.040645] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655169, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.117849] env[62974]: DEBUG nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1165.544838] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655169, 'name': ReconfigVM_Task, 'duration_secs': 0.319924} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.545219] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 255a1d01-e007-45e5-a2c9-798223f41b30/255a1d01-e007-45e5-a2c9-798223f41b30.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1165.546215] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d15a58db-f9a1-45b7-91de-8c0621978b14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.553985] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1165.553985] env[62974]: value = "task-2655170" [ 1165.553985] env[62974]: _type = "Task" [ 1165.553985] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.562233] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655170, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.623098] env[62974]: INFO nova.virt.block_device [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Booting with volume 787e16fc-37a4-4aee-b780-7e5c2733573d at /dev/sda [ 1165.659573] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5db3434a-77bb-4cdb-ad56-bd3229c4ec42 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.669802] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59fffc9-0cd8-4139-91bb-27e8c928f401 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.700052] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7b708b4-c097-4262-8cea-fda5d4c0e8f1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.709179] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a44863-1984-42f4-bbe3-7cf30dd15a00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.738390] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e937ad1b-685e-4655-9b44-e7ec1fe83c0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.744653] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7412aa7b-7be5-465e-98ba-161c455a71ab 
{{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.757922] env[62974]: DEBUG nova.virt.block_device [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updating existing volume attachment record: 733e7fd9-8085-4e51-9505-ca0bbf235b26 {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1166.066878] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655170, 'name': Rename_Task, 'duration_secs': 0.150426} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.067191] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.067490] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-867607ed-c624-4d19-abcf-1abeec4b4aef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.075358] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1166.075358] env[62974]: value = "task-2655171" [ 1166.075358] env[62974]: _type = "Task" [ 1166.075358] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.084608] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655171, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.294409] env[62974]: DEBUG nova.compute.manager [req-cb1f0882-8e58-41aa-b5a8-161b6e74dc8c req-ed3adada-42f4-41fc-b940-0d4b1964fc0b service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Received event network-vif-plugged-b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1166.294636] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb1f0882-8e58-41aa-b5a8-161b6e74dc8c req-ed3adada-42f4-41fc-b940-0d4b1964fc0b service nova] Acquiring lock "a94cb966-5304-4484-8639-899d7211e8b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.294843] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb1f0882-8e58-41aa-b5a8-161b6e74dc8c req-ed3adada-42f4-41fc-b940-0d4b1964fc0b service nova] Lock "a94cb966-5304-4484-8639-899d7211e8b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.295967] env[62974]: DEBUG oslo_concurrency.lockutils [req-cb1f0882-8e58-41aa-b5a8-161b6e74dc8c req-ed3adada-42f4-41fc-b940-0d4b1964fc0b service nova] Lock "a94cb966-5304-4484-8639-899d7211e8b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.296250] env[62974]: DEBUG nova.compute.manager [req-cb1f0882-8e58-41aa-b5a8-161b6e74dc8c req-ed3adada-42f4-41fc-b940-0d4b1964fc0b service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] No waiting events found dispatching network-vif-plugged-b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1166.296353] env[62974]: WARNING nova.compute.manager [req-cb1f0882-8e58-41aa-b5a8-161b6e74dc8c req-ed3adada-42f4-41fc-b940-0d4b1964fc0b service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Received unexpected event network-vif-plugged-b7fa291b-8b45-466d-be06-2a15ad4d11e1 for instance with vm_state building and task_state block_device_mapping. [ 1166.390234] env[62974]: DEBUG nova.network.neutron [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Successfully updated port: b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1166.586336] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655171, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.892928] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquiring lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.893096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquired lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.893347] env[62974]: DEBUG nova.network.neutron [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1167.088880] env[62974]: DEBUG oslo_vmware.api [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655171, 'name': PowerOnVM_Task, 'duration_secs': 0.926357} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.089264] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1167.089605] env[62974]: INFO nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Took 7.42 seconds to spawn the instance on the hypervisor. [ 1167.089908] env[62974]: DEBUG nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1167.090922] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f5eeab-0484-4438-accc-fc730b23c9a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.428171] env[62974]: DEBUG nova.network.neutron [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1167.564111] env[62974]: DEBUG nova.network.neutron [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updating instance_info_cache with network_info: [{"id": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "address": "fa:16:3e:11:ff:0f", "network": {"id": "134f4388-2e2e-4368-a6f1-5f6590129ed5", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1250374975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a64d9b15664a85adf28d95c8cbbe35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fa291b-8b", "ovs_interfaceid": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.610928] env[62974]: INFO nova.compute.manager [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Took 12.17 seconds to build instance. [ 1167.843371] env[62974]: DEBUG nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1167.843962] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.844222] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.844383] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.844562] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.844709] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.844892] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.845151] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.845318] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.845487] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 
tempest-ServerActionsV293TestJSON-1688415250-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.845649] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.845818] env[62974]: DEBUG nova.virt.hardware [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.846706] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0558ddd6-1f1d-4fb4-9837-83c8c971d518 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.856704] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0c3475-5c3b-43e8-b5c9-5ea665cd1800 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.066866] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Releasing lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.067227] env[62974]: DEBUG nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance network_info: |[{"id": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "address": "fa:16:3e:11:ff:0f", "network": {"id": "134f4388-2e2e-4368-a6f1-5f6590129ed5", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1250374975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a64d9b15664a85adf28d95c8cbbe35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fa291b-8b", "ovs_interfaceid": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1168.067736] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 
tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:ff:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7fa291b-8b45-466d-be06-2a15ad4d11e1', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1168.075332] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Creating folder: Project (75a64d9b15664a85adf28d95c8cbbe35). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1168.075604] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c1a2a1d-967c-49d7-bd7c-f391319fd696 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.090839] env[62974]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1168.091015] env[62974]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62974) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1168.091366] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Folder already exists: Project (75a64d9b15664a85adf28d95c8cbbe35). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1168.091555] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Creating folder: Instances. Parent ref: group-v535500. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1168.091863] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a1c5a7c-a560-4a05-bb2a-88723e39b39c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.102209] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Created folder: Instances in parent group-v535500. [ 1168.102462] env[62974]: DEBUG oslo.service.loopingcall [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1168.102666] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1168.102900] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79f66115-5542-4d67-ab66-c8ec653a2dc8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.117553] env[62974]: DEBUG oslo_concurrency.lockutils [None req-df347ba3-6bf1-4499-beec-9fc28148f8f1 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.687s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.123478] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1168.123478] env[62974]: value = "task-2655174" [ 1168.123478] env[62974]: _type = "Task" [ 1168.123478] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.132098] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655174, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.322044] env[62974]: DEBUG nova.compute.manager [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Received event network-changed-b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1168.322141] env[62974]: DEBUG nova.compute.manager [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Refreshing instance network info cache due to event network-changed-b7fa291b-8b45-466d-be06-2a15ad4d11e1. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1168.322395] env[62974]: DEBUG oslo_concurrency.lockutils [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] Acquiring lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.322546] env[62974]: DEBUG oslo_concurrency.lockutils [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] Acquired lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.322713] env[62974]: DEBUG nova.network.neutron [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Refreshing network info cache for port b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.619101] env[62974]: DEBUG nova.compute.manager [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Received event network-changed-c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1168.619101] env[62974]: DEBUG nova.compute.manager [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Refreshing instance network info cache due to event network-changed-c013e4f0-4d15-4230-bcb4-15cbadf79757. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1168.619101] env[62974]: DEBUG oslo_concurrency.lockutils [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] Acquiring lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.619101] env[62974]: DEBUG oslo_concurrency.lockutils [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] Acquired lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.619442] env[62974]: DEBUG nova.network.neutron [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Refreshing network info cache for port c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.639464] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655174, 'name': CreateVM_Task, 'duration_secs': 0.496811} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.639643] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1168.640371] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'guest_format': None, 'device_type': None, 'boot_index': 0, 'attachment_id': '733e7fd9-8085-4e51-9505-ca0bbf235b26', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535504', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'name': 'volume-787e16fc-37a4-4aee-b780-7e5c2733573d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a94cb966-5304-4484-8639-899d7211e8b6', 'attached_at': '', 'detached_at': '', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'serial': '787e16fc-37a4-4aee-b780-7e5c2733573d'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62974) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1168.640588] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Root volume attach. Driver type: vmdk {{(pid=62974) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1168.641403] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2598af9-4cf6-4027-a4bd-239a29a3fe94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.649834] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93211dfc-3bc0-4ec1-a91b-17e734387de2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.656613] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723ef98c-67b3-46d2-a521-465db2304a0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.663082] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-1feec26d-2230-4ab5-8552-b4ce473f04b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.670894] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1168.670894] env[62974]: value = "task-2655175" [ 1168.670894] env[62974]: _type = "Task" [ 1168.670894] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.686736] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655175, 'name': RelocateVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.013150] env[62974]: DEBUG nova.network.neutron [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updated VIF entry in instance network info cache for port b7fa291b-8b45-466d-be06-2a15ad4d11e1. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.013558] env[62974]: DEBUG nova.network.neutron [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updating instance_info_cache with network_info: [{"id": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "address": "fa:16:3e:11:ff:0f", "network": {"id": "134f4388-2e2e-4368-a6f1-5f6590129ed5", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1250374975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a64d9b15664a85adf28d95c8cbbe35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fa291b-8b", "ovs_interfaceid": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.186610] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655175, 'name': RelocateVM_Task, 'duration_secs': 0.029169} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.186610] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1169.186610] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535504', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'name': 'volume-787e16fc-37a4-4aee-b780-7e5c2733573d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a94cb966-5304-4484-8639-899d7211e8b6', 'attached_at': '', 'detached_at': '', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'serial': '787e16fc-37a4-4aee-b780-7e5c2733573d'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1169.187456] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e4c71b-9257-418a-9d9f-e494782e08c4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.207069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df24f7b-c37f-41fa-a1ac-0211a7e2e4b1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.230651] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] volume-787e16fc-37a4-4aee-b780-7e5c2733573d/volume-787e16fc-37a4-4aee-b780-7e5c2733573d.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1169.233181] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e50be0ed-f604-419f-b656-773666c483c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.254415] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1169.254415] env[62974]: value = "task-2655176" [ 1169.254415] env[62974]: _type = "Task" [ 1169.254415] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.262788] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655176, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.357372] env[62974]: DEBUG nova.network.neutron [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Updated VIF entry in instance network info cache for port c013e4f0-4d15-4230-bcb4-15cbadf79757. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.357809] env[62974]: DEBUG nova.network.neutron [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Updating instance_info_cache with network_info: [{"id": "c013e4f0-4d15-4230-bcb4-15cbadf79757", "address": "fa:16:3e:81:48:5d", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc013e4f0-4d", "ovs_interfaceid": "c013e4f0-4d15-4230-bcb4-15cbadf79757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.516472] env[62974]: DEBUG oslo_concurrency.lockutils [req-f4011075-8fbf-4eee-853a-15668b72b40d req-3316e0fd-43f6-4ef4-9cb5-517fbb2c7d7a service nova] Releasing lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.765694] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655176, 'name': ReconfigVM_Task, 'duration_secs': 0.285616} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.766088] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Reconfigured VM instance instance-00000072 to attach disk [datastore2] volume-787e16fc-37a4-4aee-b780-7e5c2733573d/volume-787e16fc-37a4-4aee-b780-7e5c2733573d.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1169.770847] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0a351bb-739d-4551-835c-ea472de159f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.789042] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1169.789042] env[62974]: value = "task-2655177" [ 1169.789042] env[62974]: _type = "Task" [ 1169.789042] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.797795] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655177, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.860870] env[62974]: DEBUG oslo_concurrency.lockutils [req-8cc0551c-b110-4a44-815c-d08bf3415f48 req-ac3294b7-6c3e-4687-a5c6-69939a86b402 service nova] Releasing lock "refresh_cache-255a1d01-e007-45e5-a2c9-798223f41b30" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.299370] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655177, 'name': ReconfigVM_Task, 'duration_secs': 0.135019} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.299678] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535504', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'name': 'volume-787e16fc-37a4-4aee-b780-7e5c2733573d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a94cb966-5304-4484-8639-899d7211e8b6', 'attached_at': '', 'detached_at': '', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'serial': '787e16fc-37a4-4aee-b780-7e5c2733573d'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1170.300226] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff922374-0e75-4879-a541-af2918018ffd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.307681] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1170.307681] env[62974]: value = "task-2655178" [ 1170.307681] env[62974]: _type = "Task" [ 1170.307681] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.315876] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655178, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.819833] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655178, 'name': Rename_Task, 'duration_secs': 0.150804} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.820201] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1170.820339] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab9f94d0-316c-483d-af06-ac783f1e5ba6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.827349] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1170.827349] env[62974]: value = "task-2655179" [ 1170.827349] env[62974]: _type = "Task" [ 1170.827349] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.834618] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655179, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.337958] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655179, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.838101] env[62974]: DEBUG oslo_vmware.api [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655179, 'name': PowerOnVM_Task, 'duration_secs': 0.582971} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.838479] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1171.838606] env[62974]: INFO nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Took 3.99 seconds to spawn the instance on the hypervisor. [ 1171.838744] env[62974]: DEBUG nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1171.839522] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd3adeb-9da0-4738-99cd-cc3ffebe0ffb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.357956] env[62974]: INFO nova.compute.manager [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Took 10.78 seconds to build instance. [ 1172.822844] env[62974]: DEBUG nova.compute.manager [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Received event network-changed-b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1172.823062] env[62974]: DEBUG nova.compute.manager [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Refreshing instance network info cache due to event network-changed-b7fa291b-8b45-466d-be06-2a15ad4d11e1. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1172.823284] env[62974]: DEBUG oslo_concurrency.lockutils [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] Acquiring lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1172.823670] env[62974]: DEBUG oslo_concurrency.lockutils [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] Acquired lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.823670] env[62974]: DEBUG nova.network.neutron [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Refreshing network info cache for port b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1172.859432] env[62974]: DEBUG oslo_concurrency.lockutils [None req-404241d8-e4e0-4286-b2e6-f4c1baa3c99a tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "a94cb966-5304-4484-8639-899d7211e8b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.290s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.534042] env[62974]: DEBUG nova.network.neutron [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updated VIF entry in instance network info cache for port b7fa291b-8b45-466d-be06-2a15ad4d11e1. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.534441] env[62974]: DEBUG nova.network.neutron [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updating instance_info_cache with network_info: [{"id": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "address": "fa:16:3e:11:ff:0f", "network": {"id": "134f4388-2e2e-4368-a6f1-5f6590129ed5", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1250374975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a64d9b15664a85adf28d95c8cbbe35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fa291b-8b", "ovs_interfaceid": "b7fa291b-8b45-466d-be06-2a15ad4d11e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.037513] env[62974]: DEBUG oslo_concurrency.lockutils [req-7905b7a1-6885-4db7-976d-a906b0295e7e req-f670cf80-a2a6-4278-a711-2b1666adecc8 service nova] Releasing lock "refresh_cache-a94cb966-5304-4484-8639-899d7211e8b6" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.648055] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-d86538f3-95e0-40bc-af76-c59c630febac" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.648430] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-d86538f3-95e0-40bc-af76-c59c630febac" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.648689] env[62974]: DEBUG nova.objects.instance [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'flavor' on Instance uuid d7ca15a3-edd2-48a2-9ee0-5d2072f1310a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.241720] env[62974]: DEBUG nova.objects.instance [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 
tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'pci_requests' on Instance uuid d7ca15a3-edd2-48a2-9ee0-5d2072f1310a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.744796] env[62974]: DEBUG nova.objects.base [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1179.745167] env[62974]: DEBUG nova.network.neutron [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1179.804253] env[62974]: DEBUG nova.policy [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1181.340026] env[62974]: DEBUG nova.compute.manager [req-76546ef4-05ca-4b83-badf-da04af6e85e4 req-b64c5dc8-ec21-4337-92c4-06933d43f64a service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-vif-plugged-d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1181.340281] env[62974]: DEBUG oslo_concurrency.lockutils [req-76546ef4-05ca-4b83-badf-da04af6e85e4 req-b64c5dc8-ec21-4337-92c4-06933d43f64a service nova] Acquiring lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.340547] env[62974]: DEBUG oslo_concurrency.lockutils [req-76546ef4-05ca-4b83-badf-da04af6e85e4 req-b64c5dc8-ec21-4337-92c4-06933d43f64a service nova] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.340624] env[62974]: DEBUG oslo_concurrency.lockutils [req-76546ef4-05ca-4b83-badf-da04af6e85e4 req-b64c5dc8-ec21-4337-92c4-06933d43f64a service nova] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.340779] env[62974]: DEBUG nova.compute.manager [req-76546ef4-05ca-4b83-badf-da04af6e85e4 req-b64c5dc8-ec21-4337-92c4-06933d43f64a service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] No waiting events found dispatching network-vif-plugged-d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1181.340961] env[62974]: WARNING nova.compute.manager [req-76546ef4-05ca-4b83-badf-da04af6e85e4 req-b64c5dc8-ec21-4337-92c4-06933d43f64a service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received unexpected event network-vif-plugged-d86538f3-95e0-40bc-af76-c59c630febac for instance with vm_state active and task_state None. [ 1181.413721] env[62974]: DEBUG nova.network.neutron [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Successfully updated port: d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1181.919239] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.919413] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.919593] env[62974]: DEBUG nova.network.neutron [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1182.455218] env[62974]: WARNING nova.network.neutron [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] ad86c457-3431-4c60-bde9-ddba2b588dde already exists in list: networks containing: ['ad86c457-3431-4c60-bde9-ddba2b588dde']. 
ignoring it [ 1182.709893] env[62974]: DEBUG nova.network.neutron [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86538f3-95e0-40bc-af76-c59c630febac", "address": "fa:16:3e:8b:aa:1f", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86538f3-95", "ovs_interfaceid": "d86538f3-95e0-40bc-af76-c59c630febac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.213170] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.213814] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.213981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.215352] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7f4d1e-a840-49fa-8e68-8bde68280b53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.232413] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.232641] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.232784] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.232960] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.233124] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.233271] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.233470] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.233626] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.233790] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.233950] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.234194] env[62974]: DEBUG nova.virt.hardware [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.240392] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Reconfiguring VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1183.240987] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecbfdaac-652a-40ac-8ea6-a252c9a29e1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.260243] env[62974]: DEBUG oslo_vmware.api [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1183.260243] env[62974]: value = "task-2655180" [ 1183.260243] env[62974]: _type = "Task" [ 1183.260243] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.267951] env[62974]: DEBUG oslo_vmware.api [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655180, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.372445] env[62974]: DEBUG nova.compute.manager [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-changed-d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1183.372646] env[62974]: DEBUG nova.compute.manager [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing instance network info cache due to event network-changed-d86538f3-95e0-40bc-af76-c59c630febac. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1183.372859] env[62974]: DEBUG oslo_concurrency.lockutils [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.373017] env[62974]: DEBUG oslo_concurrency.lockutils [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.373189] env[62974]: DEBUG nova.network.neutron [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing network info cache for port d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1183.770721] env[62974]: DEBUG oslo_vmware.api [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655180, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.059015] env[62974]: DEBUG nova.network.neutron [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updated VIF entry in instance network info cache for port d86538f3-95e0-40bc-af76-c59c630febac. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1184.059474] env[62974]: DEBUG nova.network.neutron [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86538f3-95e0-40bc-af76-c59c630febac", "address": "fa:16:3e:8b:aa:1f", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86538f3-95", "ovs_interfaceid": "d86538f3-95e0-40bc-af76-c59c630febac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.272504] env[62974]: DEBUG oslo_vmware.api [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655180, 'name': ReconfigVM_Task, 'duration_secs': 0.55083} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.272979] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.273217] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Reconfigured VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1184.561961] env[62974]: DEBUG oslo_concurrency.lockutils [req-e0b9f1e3-0109-472f-9d83-c45af983d40c req-ff0bd9d1-6bf9-4441-88a2-213f98277435 service nova] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.778444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aa160d66-cb97-4237-87b0-2f7a11b4f0c7 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-d86538f3-95e0-40bc-af76-c59c630febac" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.130s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.040305] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.040560] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.040744] env[62974]: INFO nova.compute.manager [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Shelving [ 1186.050874] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.051206] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36e7076e-fe4d-4738-a499-be39c9aae2ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.059104] env[62974]: DEBUG oslo_vmware.api [None 
req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1186.059104] env[62974]: value = "task-2655181" [ 1186.059104] env[62974]: _type = "Task" [ 1186.059104] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.067160] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.371086] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-d86538f3-95e0-40bc-af76-c59c630febac" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.371312] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-d86538f3-95e0-40bc-af76-c59c630febac" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.569557] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655181, 'name': PowerOffVM_Task, 'duration_secs': 0.226593} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.569815] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1186.570595] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069a75e3-490e-4a9a-a5cf-9d149d71cb1b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.588806] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204f6be2-4ac0-4940-a9b1-a66aaced8853 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.873941] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1186.874130] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.875232] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3813e29a-4353-4016-8247-a8f2bec5775a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.895149] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3abc0c-efaa-409e-a5d5-b999641ace47 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.919891] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Reconfiguring VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1186.920164] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d55cf38c-fd04-464b-b8b4-42a59a1916ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.937654] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1186.937654] env[62974]: value = "task-2655182" [ 1186.937654] env[62974]: _type = "Task" [ 1186.937654] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.944815] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.046717] env[62974]: DEBUG nova.compute.manager [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Stashing vm_state: active {{(pid=62974) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1187.098687] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Creating Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1187.099049] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-944545da-f868-4bd5-9fd3-0419d68720e3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.106729] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1187.106729] env[62974]: value = "task-2655183" [ 1187.106729] env[62974]: _type = "Task" [ 1187.106729] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.114711] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655183, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.447680] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.563658] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.563822] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.617039] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655183, 'name': CreateSnapshot_Task, 'duration_secs': 0.412485} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.617257] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Created Snapshot of the VM instance {{(pid=62974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1187.617965] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a515d74-9ee2-4e77-be2a-cdad1d7fae7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.947621] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.068566] env[62974]: INFO nova.compute.claims [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.136445] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Creating linked-clone VM from snapshot {{(pid=62974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1188.136763] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-40b823dc-9bf6-4341-bc8d-d7dacb5ea53a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.145408] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1188.145408] env[62974]: value = "task-2655184" [ 1188.145408] env[62974]: _type = "Task" [ 1188.145408] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.155500] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655184, 'name': CloneVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.449257] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.575309] env[62974]: INFO nova.compute.resource_tracker [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating resource usage from migration 5e2cc16c-f94e-4065-a882-659d452276bd [ 1188.655582] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655184, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.668847] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec92b621-c8c3-483c-8da1-8ee8e9a20e89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.675667] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8bb37b-83de-4023-a806-eedc3bd805ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.704855] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b3b4fa-ec53-450c-9ae3-bc6c597beddf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.711610] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80731c00-5197-41a4-9188-7258b19e86f2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.724265] env[62974]: DEBUG nova.compute.provider_tree [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.950507] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.155962] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655184, 'name': CloneVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.227162] env[62974]: DEBUG nova.scheduler.client.report [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.452210] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.656859] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655184, 'name': CloneVM_Task, 'duration_secs': 1.078954} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.657179] env[62974]: INFO nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Created linked-clone VM from snapshot [ 1189.657919] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b8f239-6780-4292-b9ec-068c905f8ad9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.664869] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Uploading image ccd419f6-57a7-45a7-8f37-9936619bcffe {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1189.692190] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1189.692190] env[62974]: value = "vm-535510" [ 1189.692190] env[62974]: _type = "VirtualMachine" [ 1189.692190] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1189.692465] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0d5f23c9-e512-4664-9e44-acb0f8eeb7b9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.699154] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease: (returnval){ [ 1189.699154] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd87eb-1b46-42b6-e4df-b31b5dc06b36" [ 1189.699154] env[62974]: _type = "HttpNfcLease" [ 1189.699154] env[62974]: } obtained for exporting VM: (result){ [ 1189.699154] env[62974]: value = "vm-535510" [ 1189.699154] env[62974]: _type = "VirtualMachine" [ 1189.699154] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1189.699408] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the lease: (returnval){ [ 1189.699408] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd87eb-1b46-42b6-e4df-b31b5dc06b36" [ 1189.699408] env[62974]: _type = "HttpNfcLease" [ 1189.699408] env[62974]: } to be ready. 
{{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1189.705327] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1189.705327] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd87eb-1b46-42b6-e4df-b31b5dc06b36" [ 1189.705327] env[62974]: _type = "HttpNfcLease" [ 1189.705327] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1189.735570] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.172s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.735760] env[62974]: INFO nova.compute.manager [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Migrating [ 1189.951063] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.207781] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1190.207781] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd87eb-1b46-42b6-e4df-b31b5dc06b36" [ 1190.207781] env[62974]: _type = "HttpNfcLease" [ 1190.207781] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1190.208226] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1190.208226] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd87eb-1b46-42b6-e4df-b31b5dc06b36" [ 1190.208226] env[62974]: _type = "HttpNfcLease" [ 1190.208226] env[62974]: }. {{(pid=62974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1190.208766] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6b74a1-1cb0-49f1-a037-81804eac4794 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.215938] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ea854-f0b3-30cd-6efb-6baee945e77d/disk-0.vmdk from lease info. 
{{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1190.216136] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ea854-f0b3-30cd-6efb-6baee945e77d/disk-0.vmdk for reading. {{(pid=62974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1190.271909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.272125] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.272312] env[62974]: DEBUG nova.network.neutron [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.303642] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e9c41e4e-d64d-416a-ac20-6dca6909b82b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.452184] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.953749] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.995366] env[62974]: DEBUG nova.network.neutron [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.459251] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.500582] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.956545] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.455603] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.956685] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.017468] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e4534d-625b-488b-aab0-a0c836f4c37c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.038534] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance '744a685d-845e-4818-abb5-c70056fd4cd0' progress to 0 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1193.456513] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.545627] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.545958] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69a4e0f5-3ef3-481b-bebb-df44034cd874 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.554058] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1193.554058] env[62974]: value = "task-2655186" [ 1193.554058] env[62974]: _type = "Task" [ 1193.554058] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.562041] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655186, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.957928] env[62974]: DEBUG oslo_vmware.api [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655182, 'name': ReconfigVM_Task, 'duration_secs': 6.821382} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.958358] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.958358] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Reconfigured VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1194.063195] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655186, 'name': PowerOffVM_Task, 'duration_secs': 0.277592} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.063527] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.063675] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance '744a685d-845e-4818-abb5-c70056fd4cd0' progress to 17 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1194.573501] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1194.573768] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.573912] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1194.574536] 
env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.574536] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1194.574536] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1194.574746] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1194.574746] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1194.574898] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1194.575074] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1194.575250] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1194.580332] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7ed2e74-72ec-4523-8cad-0cc1b2501da3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.596632] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1194.596632] env[62974]: value = "task-2655187" [ 1194.596632] env[62974]: _type = "Task" [ 1194.596632] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.604473] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.106975] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655187, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.262717] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.262961] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.263103] env[62974]: DEBUG nova.network.neutron [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1195.608233] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655187, 'name': ReconfigVM_Task, 'duration_secs': 0.544307} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.608601] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance '744a685d-845e-4818-abb5-c70056fd4cd0' progress to 33 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1195.961635] env[62974]: INFO nova.network.neutron [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Port d86538f3-95e0-40bc-af76-c59c630febac from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1195.962184] env[62974]: DEBUG nova.network.neutron [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.089602] env[62974]: DEBUG nova.compute.manager [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1196.089829] env[62974]: DEBUG nova.compute.manager [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing instance network info cache due to event network-changed-3130f1da-8f58-4210-ac5f-966ca6592a53. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1196.090336] env[62974]: DEBUG oslo_concurrency.lockutils [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] Acquiring lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.115535] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1196.115911] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.115983] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1196.116195] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.116359] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1196.116509] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1196.116711] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1196.116884] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1196.117194] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1196.117403] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1196.117721] env[62974]: DEBUG nova.virt.hardware [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1196.123291] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1196.124030] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a14cac9e-5071-4fe4-ad4c-d955430a08e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.144505] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1196.144505] env[62974]: value = "task-2655188" [ 1196.144505] env[62974]: _type = "Task" [ 1196.144505] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.153637] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655188, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.465515] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.467906] env[62974]: DEBUG oslo_concurrency.lockutils [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] Acquired lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.468179] env[62974]: DEBUG nova.network.neutron [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Refreshing network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.653805] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655188, 'name': ReconfigVM_Task, 'duration_secs': 0.225866} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.655039] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1196.655039] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6120c1-0b79-4a7e-b180-187eba9b862c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.676732] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1196.676982] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-057d5d0b-8919-484e-a0e8-87f8cc93cd53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.696712] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1196.696712] env[62974]: value = "task-2655189" [ 1196.696712] env[62974]: _type = "Task" [ 1196.696712] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.707948] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655189, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.774999] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-007a5e28-7891-4327-ba39-bb9da8e32495-d86538f3-95e0-40bc-af76-c59c630febac" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.775319] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-007a5e28-7891-4327-ba39-bb9da8e32495-d86538f3-95e0-40bc-af76-c59c630febac" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.775820] env[62974]: DEBUG nova.objects.instance [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'flavor' on Instance uuid 007a5e28-7891-4327-ba39-bb9da8e32495 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.971357] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6db810cb-6ab7-477a-897f-eae34fed8ed8 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-d86538f3-95e0-40bc-af76-c59c630febac" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.600s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.165965] env[62974]: DEBUG nova.network.neutron [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updated VIF entry in instance network info cache for port 3130f1da-8f58-4210-ac5f-966ca6592a53. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1197.167043] env[62974]: DEBUG nova.network.neutron [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [{"id": "3130f1da-8f58-4210-ac5f-966ca6592a53", "address": "fa:16:3e:77:4c:d9", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3130f1da-8f", "ovs_interfaceid": "3130f1da-8f58-4210-ac5f-966ca6592a53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.206709] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655189, 'name': ReconfigVM_Task, 'duration_secs': 0.432785} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.208047] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1197.208047] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance '744a685d-845e-4818-abb5-c70056fd4cd0' progress to 50 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.370143] env[62974]: DEBUG nova.objects.instance [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'pci_requests' on Instance uuid 007a5e28-7891-4327-ba39-bb9da8e32495 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.672288] env[62974]: DEBUG oslo_concurrency.lockutils [req-14d86bdb-d557-4b66-acb3-023f293e9f47 req-03eb6d14-6534-4407-a754-67ad9c9fd1b3 service nova] Releasing lock "refresh_cache-d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.714199] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91301cb-a06e-4aec-9795-96d05bd7ee50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.733666] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bfb1d3-c85a-4234-b25d-22cacaf277ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.751779] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance '744a685d-845e-4818-abb5-c70056fd4cd0' progress to 67 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.872871] env[62974]: DEBUG nova.objects.base [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Object Instance<007a5e28-7891-4327-ba39-bb9da8e32495> lazy-loaded attributes: flavor,pci_requests {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1197.873061] env[62974]: DEBUG nova.network.neutron [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1197.937999] env[62974]: DEBUG nova.policy [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 
tempest-AttachInterfacesTestJSON-1102515726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc0c43adba0745d0af668e0bfeb015e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f990de0bcb0403195a272efcc0e104c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1197.982562] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ea854-f0b3-30cd-6efb-6baee945e77d/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1197.983507] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e276af6-66bd-4147-bf54-9a5a98e1f2df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.989617] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ea854-f0b3-30cd-6efb-6baee945e77d/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1197.989782] env[62974]: ERROR oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ea854-f0b3-30cd-6efb-6baee945e77d/disk-0.vmdk due to incomplete transfer. [ 1197.989984] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d893e57e-11c3-456b-85e6-aee20b57da6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.996759] env[62974]: DEBUG oslo_vmware.rw_handles [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ea854-f0b3-30cd-6efb-6baee945e77d/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1197.996956] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Uploaded image ccd419f6-57a7-45a7-8f37-9936619bcffe to the Glance image server {{(pid=62974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1197.999223] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Destroying the VM {{(pid=62974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1197.999450] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a04fa5be-2e58-4d76-b139-634cf24d0665 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.004574] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1198.004574] env[62974]: value = "task-2655190" [ 1198.004574] env[62974]: _type = "Task" [ 1198.004574] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.012888] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655190, 'name': Destroy_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.176036] env[62974]: DEBUG nova.compute.manager [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1198.176272] env[62974]: DEBUG nova.compute.manager [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing instance network info cache due to event network-changed-84fb0bcd-a98e-4006-bc29-19f86ad7822d. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1198.176405] env[62974]: DEBUG oslo_concurrency.lockutils [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.176575] env[62974]: DEBUG oslo_concurrency.lockutils [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.176684] env[62974]: DEBUG nova.network.neutron [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1198.286908] env[62974]: DEBUG nova.network.neutron [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Port b715d91d-19dc-4ecd-9d75-e57c620d897a binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1198.514864] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655190, 'name': Destroy_Task, 'duration_secs': 0.324448} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.515142] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Destroyed the VM [ 1198.515380] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Deleting Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1198.515626] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0e8694b6-3f1e-4af9-8bd8-132c87799099 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.521067] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1198.521067] env[62974]: value = "task-2655191" [ 1198.521067] env[62974]: _type = "Task" [ 1198.521067] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.529755] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655191, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.883198] env[62974]: DEBUG nova.network.neutron [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updated VIF entry in instance network info cache for port 84fb0bcd-a98e-4006-bc29-19f86ad7822d. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1198.883615] env[62974]: DEBUG nova.network.neutron [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.031730] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655191, 'name': RemoveSnapshot_Task, 'duration_secs': 0.34404} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.031999] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Deleted Snapshot of the VM instance {{(pid=62974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1199.032317] env[62974]: DEBUG nova.compute.manager [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1199.033102] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b39b7a5-6e7b-48fd-b79b-13279e04f8a5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.305255] env[62974]: DEBUG nova.compute.manager [req-980c2dfb-1b5f-45f0-8efc-16102434a1af req-fff7ae73-23db-481b-9ce9-40c6b08e40be service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-vif-plugged-d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1199.305544] env[62974]: DEBUG oslo_concurrency.lockutils [req-980c2dfb-1b5f-45f0-8efc-16102434a1af req-fff7ae73-23db-481b-9ce9-40c6b08e40be service nova] Acquiring lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.305677] env[62974]: DEBUG oslo_concurrency.lockutils [req-980c2dfb-1b5f-45f0-8efc-16102434a1af req-fff7ae73-23db-481b-9ce9-40c6b08e40be service nova] Lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.305846] env[62974]: DEBUG oslo_concurrency.lockutils [req-980c2dfb-1b5f-45f0-8efc-16102434a1af req-fff7ae73-23db-481b-9ce9-40c6b08e40be service nova] Lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.306035] env[62974]: DEBUG nova.compute.manager [req-980c2dfb-1b5f-45f0-8efc-16102434a1af req-fff7ae73-23db-481b-9ce9-40c6b08e40be service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] No waiting events found dispatching network-vif-plugged-d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1199.306196] env[62974]: WARNING nova.compute.manager [req-980c2dfb-1b5f-45f0-8efc-16102434a1af req-fff7ae73-23db-481b-9ce9-40c6b08e40be service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received unexpected event network-vif-plugged-d86538f3-95e0-40bc-af76-c59c630febac for instance with vm_state active and task_state None. 
[ 1199.311593] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.311801] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.311967] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.383046] env[62974]: DEBUG nova.network.neutron [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Successfully updated port: d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1199.386480] env[62974]: DEBUG oslo_concurrency.lockutils [req-a8813545-6d16-431f-bdd5-f57bbfa3b47d req-6164980c-9c61-4390-816d-e20549c1f0d5 service nova] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.544372] env[62974]: INFO nova.compute.manager [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Shelve offloading [ 1199.886044] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.886286] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.886343] env[62974]: DEBUG nova.network.neutron [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.048470] env[62974]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.048778] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c1c9a8f-42ab-4733-a775-d65a15b74808 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.056551] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1200.056551] env[62974]: value = "task-2655192" [ 1200.056551] env[62974]: _type = "Task" [ 1200.056551] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.066902] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1200.067137] env[62974]: DEBUG nova.compute.manager [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1200.067999] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6775ae26-9b6a-4128-969c-30d793422bee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.073646] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.073805] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.073990] env[62974]: DEBUG nova.network.neutron [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.347420] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.347665] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.347778] env[62974]: DEBUG nova.network.neutron [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.421875] env[62974]: WARNING nova.network.neutron [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] ad86c457-3431-4c60-bde9-ddba2b588dde already exists in list: networks containing: ['ad86c457-3431-4c60-bde9-ddba2b588dde']. ignoring it [ 1200.758951] env[62974]: DEBUG nova.network.neutron [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86538f3-95e0-40bc-af76-c59c630febac", "address": "fa:16:3e:8b:aa:1f", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86538f3-95", "ovs_interfaceid": "d86538f3-95e0-40bc-af76-c59c630febac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.827096] env[62974]: DEBUG nova.network.neutron [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.031057] env[62974]: DEBUG nova.network.neutron [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.261597] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.262303] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.262468] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.263365] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667b59a2-785b-4be9-91de-f9038361d266 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.280527] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.280743] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.280898] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.281093] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.281243] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 
tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.281395] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.281590] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.281743] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.281905] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.282085] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.282296] env[62974]: DEBUG nova.virt.hardware [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.288521] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Reconfiguring VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1201.288794] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-436a3f41-0c96-4a1f-b834-8fdfa7e5250c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.306265] env[62974]: DEBUG oslo_vmware.api [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1201.306265] env[62974]: value = "task-2655193" [ 1201.306265] env[62974]: _type = "Task" [ 1201.306265] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.313919] env[62974]: DEBUG oslo_vmware.api [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655193, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.324985] env[62974]: DEBUG nova.compute.manager [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-changed-d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1201.325160] env[62974]: DEBUG nova.compute.manager [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing instance network info cache due to event network-changed-d86538f3-95e0-40bc-af76-c59c630febac. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1201.325377] env[62974]: DEBUG oslo_concurrency.lockutils [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.325528] env[62974]: DEBUG oslo_concurrency.lockutils [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.325689] env[62974]: DEBUG nova.network.neutron [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Refreshing network info cache for port d86538f3-95e0-40bc-af76-c59c630febac {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1201.329444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.533699] env[62974]: DEBUG oslo_concurrency.lockutils [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.640711] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.641679] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635c4343-12b8-4e04-9d99-e69ff9c1b663 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.651020] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.651020] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b11d52f-e0a7-4572-82e7-02611bb6b0bf {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.741072] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.741072] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.741072] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleting the datastore file [datastore1] 521b463f-98f9-4365-b446-5de9af79f220 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.741452] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-700ffde1-a992-46b8-90d3-db2797d16078 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.748418] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1201.748418] env[62974]: value = "task-2655195" [ 1201.748418] env[62974]: _type = "Task" [ 1201.748418] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.758946] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655195, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.816467] env[62974]: DEBUG oslo_vmware.api [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655193, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.015575] env[62974]: DEBUG nova.network.neutron [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updated VIF entry in instance network info cache for port d86538f3-95e0-40bc-af76-c59c630febac. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1202.016093] env[62974]: DEBUG nova.network.neutron [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86538f3-95e0-40bc-af76-c59c630febac", "address": "fa:16:3e:8b:aa:1f", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86538f3-95", "ovs_interfaceid": "d86538f3-95e0-40bc-af76-c59c630febac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.059260] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa3bdcb-f4eb-4e18-bb91-8674fdeb98f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1202.079111] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a394fb0e-0045-4cc5-9c26-41c9510ceb91 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.086081] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance '744a685d-845e-4818-abb5-c70056fd4cd0' progress to 83 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1202.258058] env[62974]: DEBUG oslo_vmware.api [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142209} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.258358] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.258550] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.258718] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.276967] env[62974]: INFO nova.scheduler.client.report [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted allocations for instance 521b463f-98f9-4365-b446-5de9af79f220 [ 1202.317344] env[62974]: DEBUG oslo_vmware.api [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655193, 'name': ReconfigVM_Task, 'duration_secs': 0.579556} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.317842] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.318089] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Reconfigured VM to attach interface {{(pid=62974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1202.518968] env[62974]: DEBUG oslo_concurrency.lockutils [req-124679f7-3a9d-4243-a21e-354770fb1f80 req-c5457007-58ca-451d-bd8e-59a207e1fd85 service nova] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.592152] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1202.592812] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c6f9a23-854a-45fb-bca4-0a32de6ce75b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.601987] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1202.601987] env[62974]: value = "task-2655196" [ 1202.601987] env[62974]: _type = "Task" [ 1202.601987] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.610255] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655196, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.781764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.782134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.782420] env[62974]: DEBUG nova.objects.instance [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'resources' on Instance uuid 521b463f-98f9-4365-b446-5de9af79f220 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.822270] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b512588b-2d3d-4a8f-9972-bef4d2abce54 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-007a5e28-7891-4327-ba39-bb9da8e32495-d86538f3-95e0-40bc-af76-c59c630febac" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.047s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.112536] env[62974]: DEBUG oslo_vmware.api [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655196, 'name': PowerOnVM_Task, 'duration_secs': 0.386357} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.112759] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1203.112949] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-5f53350a-c884-4810-b6ad-924069999823 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance '744a685d-845e-4818-abb5-c70056fd4cd0' progress to 100 {{(pid=62974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.285558] env[62974]: DEBUG nova.objects.instance [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'numa_topology' on Instance uuid 521b463f-98f9-4365-b446-5de9af79f220 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.350447] env[62974]: DEBUG nova.compute.manager [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-vif-unplugged-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1203.350447] env[62974]: DEBUG oslo_concurrency.lockutils [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.350620] env[62974]: DEBUG oslo_concurrency.lockutils [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.350681] env[62974]: DEBUG oslo_concurrency.lockutils [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.350827] env[62974]: DEBUG nova.compute.manager [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] No waiting events found dispatching network-vif-unplugged-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1203.350999] env[62974]: WARNING nova.compute.manager [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received unexpected event network-vif-unplugged-6214f8c1-1172-4dbd-b021-d468e0b04110 for instance with vm_state shelved_offloaded and task_state None. 
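The "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" triplets that recur throughout these entries (including the per-instance "521b463f-...-events" lock just above) are emitted from oslo.concurrency's lockutils, as the lockutils.py line references in the log show. The following is a minimal sketch of that pattern using only the public oslo.concurrency API; the lock names are copied from this log, but update_usage and the bodies are illustrative stand-ins, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on a named in-process lock; the
    # "Acquiring lock ... by ..." / "waited" / "held" DEBUG lines above appear
    # to come from this wrapper (e.g. the "compute_resources" lock).
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section: one caller at a time per process

    # Context-manager form, matching the "Acquiring/Acquired/Releasing lock"
    # lines used for the per-instance refresh_cache locks in this log.
    with lockutils.lock('refresh_cache-521b463f-98f9-4365-b446-5de9af79f220'):
        pass  # e.g. rebuild the instance network info cache while held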
[ 1203.351178] env[62974]: DEBUG nova.compute.manager [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1203.351335] env[62974]: DEBUG nova.compute.manager [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing instance network info cache due to event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1203.351522] env[62974]: DEBUG oslo_concurrency.lockutils [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.351692] env[62974]: DEBUG oslo_concurrency.lockutils [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.351863] env[62974]: DEBUG nova.network.neutron [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.790745] env[62974]: DEBUG nova.objects.base [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Object Instance<521b463f-98f9-4365-b446-5de9af79f220> lazy-loaded attributes: resources,numa_topology {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1203.870662] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5b73d2-7258-43d7-b4bf-1bcd0788317c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.878396] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383b27d2-b984-4c4b-b158-b6b6cb5c76df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.911024] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b00497f-d451-45f6-8ed6-3620d5a8c0ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.918674] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef308be-e175-40e4-b02b-52c56ea5d4c6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.932247] env[62974]: DEBUG nova.compute.provider_tree [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.125864] env[62974]: DEBUG nova.network.neutron [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updated VIF entry in instance network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.126210] env[62974]: DEBUG nova.network.neutron [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap6214f8c1-11", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.436181] env[62974]: DEBUG nova.scheduler.client.report [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.456997] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "interface-007a5e28-7891-4327-ba39-bb9da8e32495-d86538f3-95e0-40bc-af76-c59c630febac" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.457241] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-007a5e28-7891-4327-ba39-bb9da8e32495-d86538f3-95e0-40bc-af76-c59c630febac" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.628739] env[62974]: DEBUG oslo_concurrency.lockutils [req-0c6d9555-9564-4444-ab3e-447c8d96e9a0 req-5f76c2c5-60a9-4de0-b7a3-61e9066862d2 service nova] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.887421] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.940933] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.959533] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.959663] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.960451] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900c39a3-ed63-425f-b2c7-82247b7fb05b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.978837] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c20a3b2-41fa-42db-9b99-d2a669f5c880 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.006481] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Reconfiguring VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1205.006737] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5d723ab-4d64-471f-b510-1770b214a391 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.025603] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1205.025603] env[62974]: value = "task-2655197" [ 1205.025603] env[62974]: _type = "Task" [ 
1205.025603] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.033423] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.409749] env[62974]: DEBUG nova.network.neutron [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Port b715d91d-19dc-4ecd-9d75-e57c620d897a binding to destination host cpu-1 is already ACTIVE {{(pid=62974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1205.410035] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.410198] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.410362] env[62974]: DEBUG nova.network.neutron [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1205.449735] env[62974]: DEBUG oslo_concurrency.lockutils [None req-6b41ef87-e835-4e71-b5b7-bb8861b20a2e tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.409s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.451239] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.564s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.451335] env[62974]: INFO nova.compute.manager [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Unshelving [ 1205.536074] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.035903] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.112486] env[62974]: DEBUG nova.network.neutron [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.478295] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.478571] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.478792] env[62974]: DEBUG nova.objects.instance [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'pci_requests' on Instance uuid 521b463f-98f9-4365-b446-5de9af79f220 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.536884] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 
'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.615198] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.982386] env[62974]: DEBUG nova.objects.instance [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lazy-loading 'numa_topology' on Instance uuid 521b463f-98f9-4365-b446-5de9af79f220 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.037286] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.118927] env[62974]: DEBUG nova.compute.manager [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62974) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1207.119251] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.485548] env[62974]: INFO nova.compute.claims [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1207.537296] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.697026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "255a1d01-e007-45e5-a2c9-798223f41b30" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.697026] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.037731] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.200365] env[62974]: DEBUG nova.compute.utils [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1208.537946] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.577168] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58db5937-6df7-4a38-b413-86e213d01d0a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.584009] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6892720-e3f3-4ee2-807b-14d8ece04d73 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.613519] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b67970-5aae-4ea8-a7dc-90d7aafa1182 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.619887] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e0e559-6f89-433e-9af6-c8fdd4f299de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.633608] env[62974]: DEBUG nova.compute.provider_tree [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.703215] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.039217] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.136619] env[62974]: DEBUG nova.scheduler.client.report [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1209.539550] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.642100] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.163s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.643898] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.525s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.671718] env[62974]: INFO nova.network.neutron [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating port 6214f8c1-1172-4dbd-b021-d468e0b04110 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1209.767968] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "255a1d01-e007-45e5-a2c9-798223f41b30" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.768241] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.768485] env[62974]: INFO nova.compute.manager [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Attaching volume fc927034-a8e9-49ad-b9a6-de33584632a9 to /dev/sdb [ 1209.798825] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ab4e73-6f24-4b99-8ff3-614180147269 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.806199] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce0d58e-5c73-4886-82c5-0d68c28b61c7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.818946] env[62974]: DEBUG nova.virt.block_device [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Updating existing volume attachment record: 746c90da-d737-4a38-a82a-14fe16a77ebd {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1210.042237] env[62974]: DEBUG 
oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.146576] env[62974]: DEBUG nova.objects.instance [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'migration_context' on Instance uuid 744a685d-845e-4818-abb5-c70056fd4cd0 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.541162] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.744994] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc75d090-d3b7-4e99-b455-ed3e07040498 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.754831] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40b5c76-05ab-4d97-b667-edd7e60e09eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.783668] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0daaa489-2cc6-4a7f-83c5-a23e46615fa6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.790378] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e7c855-2e26-4776-8999-c53f40950de3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.803152] env[62974]: DEBUG nova.compute.provider_tree [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.038402] env[62974]: DEBUG nova.compute.manager [req-88d8a8e5-11a2-4bfd-a708-cda50646ff8e req-8d739a29-5963-4fd0-b673-b2115a2b0a87 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-vif-plugged-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1211.038624] env[62974]: DEBUG oslo_concurrency.lockutils [req-88d8a8e5-11a2-4bfd-a708-cda50646ff8e req-8d739a29-5963-4fd0-b673-b2115a2b0a87 service nova] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.038839] env[62974]: DEBUG oslo_concurrency.lockutils [req-88d8a8e5-11a2-4bfd-a708-cda50646ff8e req-8d739a29-5963-4fd0-b673-b2115a2b0a87 service nova] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.039107] env[62974]: DEBUG oslo_concurrency.lockutils [req-88d8a8e5-11a2-4bfd-a708-cda50646ff8e req-8d739a29-5963-4fd0-b673-b2115a2b0a87 service nova] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.039305] env[62974]: DEBUG nova.compute.manager [req-88d8a8e5-11a2-4bfd-a708-cda50646ff8e req-8d739a29-5963-4fd0-b673-b2115a2b0a87 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] No waiting events found dispatching network-vif-plugged-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1211.039471] env[62974]: WARNING nova.compute.manager [req-88d8a8e5-11a2-4bfd-a708-cda50646ff8e req-8d739a29-5963-4fd0-b673-b2115a2b0a87 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received unexpected event network-vif-plugged-6214f8c1-1172-4dbd-b021-d468e0b04110 for instance with vm_state shelved_offloaded and task_state spawning. [ 1211.045058] env[62974]: DEBUG oslo_vmware.api [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655197, 'name': ReconfigVM_Task, 'duration_secs': 5.757633} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.045331] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1211.045472] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Reconfigured VM to detach interface {{(pid=62974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1211.098201] env[62974]: INFO nova.compute.manager [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Rebuilding instance [ 1211.139423] env[62974]: DEBUG nova.compute.manager [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1211.140298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d457de-fede-4b4b-bcc9-2ad5a2718029 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.188096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 
tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.188096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.188096] env[62974]: DEBUG nova.network.neutron [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1211.305847] env[62974]: DEBUG nova.scheduler.client.report [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1211.882154] env[62974]: DEBUG nova.network.neutron [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.152774] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 
tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1212.153157] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1621d2fd-b8e0-495f-9c01-b75f00ab61b7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.161117] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1212.161117] env[62974]: value = "task-2655200" [ 1212.161117] env[62974]: _type = "Task" [ 1212.161117] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.168792] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.317098] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.673s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.377166] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.377544] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquired lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.377544] env[62974]: DEBUG nova.network.neutron [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1212.384500] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1212.413853] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6aa01930d567800f8db53b30c4121827',container_format='bare',created_at=2025-02-19T04:01:27Z,direct_url=,disk_format='vmdk',id=ccd419f6-57a7-45a7-8f37-9936619bcffe,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-2122048892-shelved',owner='5ecf0c1b56e34a6cbc2d073089e37efc',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2025-02-19T04:01:40Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1212.414114] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1212.414281] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1212.414554] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1212.414607] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1212.414766] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1212.414991] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1212.415164] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1212.415328] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1212.415489] env[62974]: 
DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1212.415661] env[62974]: DEBUG nova.virt.hardware [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1212.416753] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e44416-fc61-4da7-8901-53d6ab48c720 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.425379] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bd7ea3-dbaa-43d0-b542-d0d73bbe393a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.439760] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:58:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7f41333-42ee-47f3-936c-d6701ab786d2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6214f8c1-1172-4dbd-b021-d468e0b04110', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1212.447145] env[62974]: DEBUG oslo.service.loopingcall [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1212.447658] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1212.448101] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0a7dc44-8b01-49b5-9ee5-ea2c033101f9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.468192] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1212.468192] env[62974]: value = "task-2655201" [ 1212.468192] env[62974]: _type = "Task" [ 1212.468192] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.475517] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655201, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.671229] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655200, 'name': PowerOffVM_Task, 'duration_secs': 0.171062} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.671548] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1212.672278] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1212.672594] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c377c944-da9a-49f9-b549-d13bdbdb34c3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.679398] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1212.679398] env[62974]: value = "task-2655202" [ 1212.679398] env[62974]: _type = "Task" [ 1212.679398] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.689228] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] VM already powered off {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1212.689429] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Volume detach. 
Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1212.689617] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535504', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'name': 'volume-787e16fc-37a4-4aee-b780-7e5c2733573d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a94cb966-5304-4484-8639-899d7211e8b6', 'attached_at': '', 'detached_at': '', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'serial': '787e16fc-37a4-4aee-b780-7e5c2733573d'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1212.690358] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141784b2-be5f-40df-88b5-a576417e19b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.708887] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed542f6d-b868-447f-8402-818638fdd527 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.715302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebddd39-6f6c-456a-869e-8ce81eeb07f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.732775] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06880b96-6b95-4ab0-898f-14a425a176f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.747879] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] The volume has not been displaced from its original location: [datastore2] volume-787e16fc-37a4-4aee-b780-7e5c2733573d/volume-787e16fc-37a4-4aee-b780-7e5c2733573d.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1212.753192] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1212.753500] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6223b5e-d69d-4741-bfed-6e2589578d76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.771375] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1212.771375] env[62974]: value = "task-2655203" [ 1212.771375] env[62974]: _type = "Task" [ 1212.771375] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.779118] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655203, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.912179] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "007a5e28-7891-4327-ba39-bb9da8e32495" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.912475] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "007a5e28-7891-4327-ba39-bb9da8e32495" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.912692] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.912878] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.913057] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "007a5e28-7891-4327-ba39-bb9da8e32495-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.917919] env[62974]: INFO nova.compute.manager [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Terminating instance [ 1212.979701] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655201, 'name': CreateVM_Task, 'duration_secs': 0.292962} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.979866] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1212.980513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.980677] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.981079] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1212.981330] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c516e42-9d3e-4a1f-9136-b811a2462e78 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.985552] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1212.985552] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d659cc-a4df-c6b5-e3ab-65c2167aa813" [ 1212.985552] env[62974]: _type = "Task" [ 1212.985552] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.993122] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d659cc-a4df-c6b5-e3ab-65c2167aa813, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.064026] env[62974]: DEBUG nova.compute.manager [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1213.064235] env[62974]: DEBUG nova.compute.manager [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing instance network info cache due to event network-changed-6214f8c1-1172-4dbd-b021-d468e0b04110. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1213.064451] env[62974]: DEBUG oslo_concurrency.lockutils [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.064645] env[62974]: DEBUG oslo_concurrency.lockutils [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.064760] env[62974]: DEBUG nova.network.neutron [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Refreshing network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1213.081854] env[62974]: INFO nova.network.neutron [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Port d86538f3-95e0-40bc-af76-c59c630febac from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
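(Illustrative aside, not part of the log: the "Acquiring lock ... / Lock ... acquired ... / released" DEBUG lines threaded through these entries, including the "refresh_cache-..." lock just above, come from oslo.concurrency's locking helpers. A minimal sketch of how a Nova-style method produces them is below, assuming oslo.concurrency is installed; the lock names and the function are hypothetical stand-ins, not code taken from this log.)

# Illustrative sketch only -- not code from this log.  oslo.concurrency's
# locking helpers emit the "Acquiring lock ... / acquired ... / released"
# DEBUG lines seen above.  Lock names and the function here are hypothetical.
from oslo_concurrency import lockutils


@lockutils.synchronized('refresh_cache-521b463f-98f9-4365-b446-5de9af79f220')
def refresh_network_info_cache():
    # Only one thread holding this lock name runs the body at a time;
    # entering and leaving the decorated call logs the acquire/release lines.
    pass


# The same module also offers a context-manager form:
with lockutils.lock('compute_resources'):
    pass  # e.g. resource-tracker style claim/drop work

(End of aside; the log continues below.)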
[ 1213.082233] env[62974]: DEBUG nova.network.neutron [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [{"id": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "address": "fa:16:3e:ef:d8:70", "network": {"id": "ad86c457-3431-4c60-bde9-ddba2b588dde", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1965024429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f990de0bcb0403195a272efcc0e104c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84fb0bcd-a9", "ovs_interfaceid": "84fb0bcd-a98e-4006-bc29-19f86ad7822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.280667] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655203, 'name': ReconfigVM_Task, 'duration_secs': 0.152298} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.281044] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1213.285579] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e743c34-a663-4295-836a-871e8e52d04e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.299824] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1213.299824] env[62974]: value = "task-2655204" [ 1213.299824] env[62974]: _type = "Task" [ 1213.299824] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.308714] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655204, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.421620] env[62974]: DEBUG nova.compute.manager [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1213.421840] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.422826] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460f27aa-d27b-40aa-88dc-08bcf257f571 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.430426] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.430653] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ace7de85-713b-4248-a866-c6553630be84 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.437141] env[62974]: DEBUG oslo_vmware.api [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1213.437141] env[62974]: value = "task-2655205" [ 1213.437141] env[62974]: _type = "Task" [ 1213.437141] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.444862] env[62974]: DEBUG oslo_vmware.api [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655205, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.498058] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.498407] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Processing image ccd419f6-57a7-45a7-8f37-9936619bcffe {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1213.498735] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe/ccd419f6-57a7-45a7-8f37-9936619bcffe.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.498946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe/ccd419f6-57a7-45a7-8f37-9936619bcffe.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.499206] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1213.499536] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58c9a46e-a97c-463d-941d-6806eec34e40 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.508156] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1213.508405] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1213.509424] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c0ceabe-b0ac-46f4-94a5-0daba020f4f7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.515771] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1213.515771] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd73df-24c8-a45a-e2a5-71f85d893fa7" [ 1213.515771] env[62974]: _type = "Task" [ 1213.515771] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.526172] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd73df-24c8-a45a-e2a5-71f85d893fa7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.585112] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Releasing lock "refresh_cache-007a5e28-7891-4327-ba39-bb9da8e32495" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.759920] env[62974]: DEBUG nova.network.neutron [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updated VIF entry in instance network info cache for port 6214f8c1-1172-4dbd-b021-d468e0b04110. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1213.759920] env[62974]: DEBUG nova.network.neutron [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.810272] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655204, 'name': ReconfigVM_Task, 'duration_secs': 0.154067} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.810528] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535504', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'name': 'volume-787e16fc-37a4-4aee-b780-7e5c2733573d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a94cb966-5304-4484-8639-899d7211e8b6', 'attached_at': '', 'detached_at': '', 'volume_id': '787e16fc-37a4-4aee-b780-7e5c2733573d', 'serial': '787e16fc-37a4-4aee-b780-7e5c2733573d'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1213.810810] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.811576] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2058b4f-b2e4-4b69-846f-9e37c319fd8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.817891] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1213.818127] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bce66ab8-0841-4910-8633-fd51d88c8e13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.856772] env[62974]: INFO nova.compute.manager [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Swapping old allocation on dict_keys(['bd3bd9ae-180c-41cf-831e-3dd3892efa18']) held by migration 5e2cc16c-f94e-4065-a882-659d452276bd for instance [ 1213.882016] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1213.882257] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1213.882522] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 
tempest-ServerActionsV293TestJSON-1688415250-project-member] Deleting the datastore file [datastore2] a94cb966-5304-4484-8639-899d7211e8b6 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1213.883373] env[62974]: DEBUG nova.scheduler.client.report [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Overwriting current allocation {'allocations': {'bd3bd9ae-180c-41cf-831e-3dd3892efa18': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 163}}, 'project_id': '567f64e735384503b6c0172050bdfaf5', 'user_id': '1b837770f3f74a5fad99c7cc150e9cde', 'consumer_generation': 1} on consumer 744a685d-845e-4818-abb5-c70056fd4cd0 {{(pid=62974) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1213.885217] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f78db8d-1fd8-43f1-b511-adbd8d4f83a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.891881] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for the task: (returnval){ [ 1213.891881] env[62974]: value = "task-2655207" [ 1213.891881] env[62974]: _type = "Task" [ 1213.891881] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.898930] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655207, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.945947] env[62974]: DEBUG oslo_vmware.api [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655205, 'name': PowerOffVM_Task, 'duration_secs': 0.162935} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.946198] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1213.946369] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1213.946595] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53f569b8-b656-479a-9ac0-1e4ba90b41d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.964590] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.964772] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.964944] env[62974]: DEBUG nova.network.neutron [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1214.008075] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.008310] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.008494] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleting the datastore file [datastore2] 007a5e28-7891-4327-ba39-bb9da8e32495 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.008743] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c332db71-556a-4db3-a123-df1e2e37ecd3 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.015857] env[62974]: DEBUG oslo_vmware.api [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1214.015857] env[62974]: value = "task-2655209" [ 1214.015857] env[62974]: _type = "Task" [ 1214.015857] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.025544] env[62974]: DEBUG oslo_vmware.api [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655209, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.028767] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Preparing fetch location {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1214.028993] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Fetch image to [datastore2] OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c/OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c.vmdk {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1214.029191] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Downloading stream optimized image ccd419f6-57a7-45a7-8f37-9936619bcffe to [datastore2] OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c/OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c.vmdk on the data store datastore2 as vApp {{(pid=62974) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1214.029360] env[62974]: DEBUG nova.virt.vmwareapi.images [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Downloading image file data ccd419f6-57a7-45a7-8f37-9936619bcffe to the ESX as VM named 'OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c' {{(pid=62974) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1214.088952] env[62974]: DEBUG oslo_concurrency.lockutils [None req-0669320f-f05a-4a71-a992-2e1330d8a283 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "interface-007a5e28-7891-4327-ba39-bb9da8e32495-d86538f3-95e0-40bc-af76-c59c630febac" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.631s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.114744] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] 
Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1214.114744] env[62974]: value = "resgroup-9" [ 1214.114744] env[62974]: _type = "ResourcePool" [ 1214.114744] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1214.115066] env[62974]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-1f87cf2b-815c-40a8-a56b-75440937b050 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.135211] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease: (returnval){ [ 1214.135211] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296a114-f41f-8c0e-1531-afed59335e39" [ 1214.135211] env[62974]: _type = "HttpNfcLease" [ 1214.135211] env[62974]: } obtained for vApp import into resource pool (val){ [ 1214.135211] env[62974]: value = "resgroup-9" [ 1214.135211] env[62974]: _type = "ResourcePool" [ 1214.135211] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1214.135660] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the lease: (returnval){ [ 1214.135660] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296a114-f41f-8c0e-1531-afed59335e39" [ 1214.135660] env[62974]: _type = "HttpNfcLease" [ 1214.135660] env[62974]: } to be ready. {{(pid=62974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1214.142289] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1214.142289] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296a114-f41f-8c0e-1531-afed59335e39" [ 1214.142289] env[62974]: _type = "HttpNfcLease" [ 1214.142289] env[62974]: } is initializing. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1214.262967] env[62974]: DEBUG oslo_concurrency.lockutils [req-630c0f25-9ee3-41f8-8a9d-6447aa9d5c31 req-3e7f1322-3b4d-408f-9cce-c3a26ff707f5 service nova] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.364500] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Volume attach. 
Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1214.364758] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535511', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'name': 'volume-fc927034-a8e9-49ad-b9a6-de33584632a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '255a1d01-e007-45e5-a2c9-798223f41b30', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'serial': 'fc927034-a8e9-49ad-b9a6-de33584632a9'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1214.365628] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494762c1-e243-409e-bf8a-50c3848d136d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.381571] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3798a5-d729-4b46-b123-be44ff30ab1e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.405484] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-fc927034-a8e9-49ad-b9a6-de33584632a9/volume-fc927034-a8e9-49ad-b9a6-de33584632a9.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1214.408224] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5aa794b-f384-4cbe-a68f-5fdadc16845e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.425120] env[62974]: DEBUG oslo_vmware.api [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Task: {'id': task-2655207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073117} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.426236] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1214.426420] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1214.426593] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1214.428189] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1214.428189] env[62974]: value = "task-2655211" [ 1214.428189] env[62974]: _type = "Task" [ 1214.428189] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.436493] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655211, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.484652] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1214.485030] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2eb5fecf-b26d-4c1f-8503-196a9d3126cb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.494029] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd32b32-31af-4930-8c8e-50d8faf53564 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.527676] env[62974]: ERROR nova.compute.manager [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Failed to detach volume 787e16fc-37a4-4aee-b780-7e5c2733573d from /dev/sda: nova.exception.InstanceNotFound: Instance a94cb966-5304-4484-8639-899d7211e8b6 could not be found. 
[ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] Traceback (most recent call last): [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self.driver.rebuild(**kwargs) [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] raise NotImplementedError() [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] NotImplementedError [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] During handling of the above exception, another exception occurred: [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] Traceback (most recent call last): [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self.driver.detach_volume(context, old_connection_info, [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] return self._volumeops.detach_volume(connection_info, instance) [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self._detach_volume_vmdk(connection_info, instance) [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] stable_ref.fetch_moref(session) [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] 
nova.exception.InstanceNotFound: Instance a94cb966-5304-4484-8639-899d7211e8b6 could not be found. [ 1214.527676] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.537512] env[62974]: DEBUG oslo_vmware.api [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14279} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.537769] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1214.538061] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1214.538307] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1214.538493] env[62974]: INFO nova.compute.manager [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1214.538734] env[62974]: DEBUG oslo.service.loopingcall [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1214.539071] env[62974]: DEBUG nova.compute.manager [-] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1214.539154] env[62974]: DEBUG nova.network.neutron [-] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1214.647434] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1214.647434] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296a114-f41f-8c0e-1531-afed59335e39" [ 1214.647434] env[62974]: _type = "HttpNfcLease" [ 1214.647434] env[62974]: } is initializing. 
{{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1214.667035] env[62974]: DEBUG nova.compute.utils [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Build of instance a94cb966-5304-4484-8639-899d7211e8b6 aborted: Failed to rebuild volume backed instance. {{(pid=62974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1214.669240] env[62974]: ERROR nova.compute.manager [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance a94cb966-5304-4484-8639-899d7211e8b6 aborted: Failed to rebuild volume backed instance. [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] Traceback (most recent call last): [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self.driver.rebuild(**kwargs) [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] raise NotImplementedError() [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] NotImplementedError [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] During handling of the above exception, another exception occurred: [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] Traceback (most recent call last): [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self._detach_root_volume(context, instance, root_bdm) [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] with excutils.save_and_reraise_exception(): [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self.force_reraise() [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] raise self.value [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self.driver.detach_volume(context, old_connection_info, [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] return self._volumeops.detach_volume(connection_info, instance) [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self._detach_volume_vmdk(connection_info, instance) [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] stable_ref.fetch_moref(session) [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] nova.exception.InstanceNotFound: Instance a94cb966-5304-4484-8639-899d7211e8b6 could not be found. 
[ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] During handling of the above exception, another exception occurred: [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] Traceback (most recent call last): [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 11382, in _error_out_instance_on_exception [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] yield [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1214.669240] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self._do_rebuild_instance_with_claim( [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self._do_rebuild_instance( [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self._rebuild_default_impl(**kwargs) [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] self._rebuild_volume_backed_instance( [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] raise exception.BuildAbortException( [ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] nova.exception.BuildAbortException: Build of instance a94cb966-5304-4484-8639-899d7211e8b6 aborted: Failed to rebuild volume backed instance. 
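Editor's note: the outer frames show the standard oslo.utils chaining that produces the final BuildAbortException: the detach failure is preserved and re-raised by excutils.save_and_reraise_exception() (the force_reraise / "raise self.value" frames above), and the rebuild path then converts it into a build abort, which is what sets vm_state to ERROR. A hedged sketch of that pattern follows; only oslo_utils.excutils is the real API, the function and exception names are stand-ins mirroring the frame names, not Nova's actual methods.

    from oslo_utils import excutils

    class BuildAbortException(Exception):
        """Stand-in for nova.exception.BuildAbortException (illustrative only)."""

    def _detach_root_volume(detach):
        try:
            detach()
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup or logging may run here; leaving the context manager
                # calls force_reraise(), which re-raises the saved exception.
                pass

    def _rebuild_volume_backed_instance(instance_uuid, detach):
        try:
            _detach_root_volume(detach)
        except Exception:
            # Any failure while detaching the root volume aborts the rebuild,
            # mirroring "raise exception.BuildAbortException(...)" in the trace.
            raise BuildAbortException(
                'Build of instance %s aborted: Failed to rebuild volume '
                'backed instance.' % instance_uuid)
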
[ 1214.670502] env[62974]: ERROR nova.compute.manager [instance: a94cb966-5304-4484-8639-899d7211e8b6] [ 1214.795571] env[62974]: DEBUG nova.network.neutron [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [{"id": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "address": "fa:16:3e:f0:df:11", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb715d91d-19", "ovs_interfaceid": "b715d91d-19dc-4ecd-9d75-e57c620d897a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.938578] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655211, 'name': ReconfigVM_Task, 'duration_secs': 0.350599} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.938858] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-fc927034-a8e9-49ad-b9a6-de33584632a9/volume-fc927034-a8e9-49ad-b9a6-de33584632a9.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.943586] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ea42294-11d8-449f-b753-3c4a2799ca13 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.959114] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1214.959114] env[62974]: value = "task-2655212" [ 1214.959114] env[62974]: _type = "Task" [ 1214.959114] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.966137] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655212, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.145268] env[62974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1215.145268] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296a114-f41f-8c0e-1531-afed59335e39" [ 1215.145268] env[62974]: _type = "HttpNfcLease" [ 1215.145268] env[62974]: } is ready. {{(pid=62974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1215.145643] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1215.145643] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5296a114-f41f-8c0e-1531-afed59335e39" [ 1215.145643] env[62974]: _type = "HttpNfcLease" [ 1215.145643] env[62974]: }. {{(pid=62974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1215.146293] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a9b222-f053-489f-b222-92b306a702ee {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.154831] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52614d57-f3f5-6aa0-428a-ad50a5ac223e/disk-0.vmdk from lease info. {{(pid=62974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1215.155028] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52614d57-f3f5-6aa0-428a-ad50a5ac223e/disk-0.vmdk. 
{{(pid=62974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1215.219087] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-92c5cc20-1341-4ecd-a3f8-161bd28bbb8f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.291537] env[62974]: DEBUG nova.compute.manager [req-3dadb8a5-10dd-4af7-aa99-551f9b46d2bb req-33d32626-b29f-4a7c-89f0-e7baea32e05e service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Received event network-vif-deleted-84fb0bcd-a98e-4006-bc29-19f86ad7822d {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1215.291647] env[62974]: INFO nova.compute.manager [req-3dadb8a5-10dd-4af7-aa99-551f9b46d2bb req-33d32626-b29f-4a7c-89f0-e7baea32e05e service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Neutron deleted interface 84fb0bcd-a98e-4006-bc29-19f86ad7822d; detaching it from the instance and deleting it from the info cache [ 1215.291847] env[62974]: DEBUG nova.network.neutron [req-3dadb8a5-10dd-4af7-aa99-551f9b46d2bb req-33d32626-b29f-4a7c-89f0-e7baea32e05e service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.298155] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-744a685d-845e-4818-abb5-c70056fd4cd0" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.298648] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1215.298963] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da53f91d-cc88-4f44-8acd-3a397f371afe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.306804] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1215.306804] env[62974]: value = "task-2655213" [ 1215.306804] env[62974]: _type = "Task" [ 1215.306804] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.314511] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655213, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.472505] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655212, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.768670] env[62974]: DEBUG nova.network.neutron [-] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.795167] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-14e0c875-6b99-4091-9a13-0fa0f9fcf563 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.806420] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629151ce-b035-4260-9c7d-b786ea192ba5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.831620] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655213, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.839943] env[62974]: DEBUG nova.compute.manager [req-3dadb8a5-10dd-4af7-aa99-551f9b46d2bb req-33d32626-b29f-4a7c-89f0-e7baea32e05e service nova] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Detach interface failed, port_id=84fb0bcd-a98e-4006-bc29-19f86ad7822d, reason: Instance 007a5e28-7891-4327-ba39-bb9da8e32495 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1215.971242] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655212, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.271614] env[62974]: INFO nova.compute.manager [-] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Took 1.73 seconds to deallocate network for instance. [ 1216.330887] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655213, 'name': PowerOffVM_Task, 'duration_secs': 0.955201} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.331978] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Completed reading data from the image iterator. {{(pid=62974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1216.332200] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52614d57-f3f5-6aa0-428a-ad50a5ac223e/disk-0.vmdk. 
{{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1216.332517] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1216.333212] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1216.333452] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1216.333607] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1216.333806] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1216.333985] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1216.334147] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1216.334377] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1216.334558] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 
tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1216.334730] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1216.334895] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1216.335098] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1216.340807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b1d383-25df-4bd9-acf6-4730ce5dde21 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.343386] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd513a92-af68-4506-95b4-f5591b51ca0e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.357018] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52614d57-f3f5-6aa0-428a-ad50a5ac223e/disk-0.vmdk is in state: ready. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1216.357196] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52614d57-f3f5-6aa0-428a-ad50a5ac223e/disk-0.vmdk. {{(pid=62974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1216.357409] env[62974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-47a23daa-e4f3-4b1d-86f0-05ec13dafc00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.359760] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1216.359760] env[62974]: value = "task-2655214" [ 1216.359760] env[62974]: _type = "Task" [ 1216.359760] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.367715] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655214, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.468854] env[62974]: DEBUG oslo_vmware.api [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655212, 'name': ReconfigVM_Task, 'duration_secs': 1.164677} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.469146] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535511', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'name': 'volume-fc927034-a8e9-49ad-b9a6-de33584632a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '255a1d01-e007-45e5-a2c9-798223f41b30', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'serial': 'fc927034-a8e9-49ad-b9a6-de33584632a9'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1216.534984] env[62974]: DEBUG oslo_vmware.rw_handles [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52614d57-f3f5-6aa0-428a-ad50a5ac223e/disk-0.vmdk. 
{{(pid=62974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1216.535293] env[62974]: INFO nova.virt.vmwareapi.images [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Downloaded image file data ccd419f6-57a7-45a7-8f37-9936619bcffe [ 1216.536200] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c1afe9-793f-46a0-9ebc-72c7bb54c02c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.552504] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e8e58b4-c42d-42b5-ade8-790ae5d7835d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.576311] env[62974]: INFO nova.virt.vmwareapi.images [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] The imported VM was unregistered [ 1216.578812] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Caching image {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1216.579096] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Creating directory with path [datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1216.579395] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d92cada-1ab1-441d-8b17-60d7f4d26175 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.590861] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Created directory with path [datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1216.591048] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c/OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c.vmdk to [datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe/ccd419f6-57a7-45a7-8f37-9936619bcffe.vmdk. 
{{(pid=62974) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1216.591284] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b5a28991-599e-499e-a40b-2e7d30b703e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.597325] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1216.597325] env[62974]: value = "task-2655216" [ 1216.597325] env[62974]: _type = "Task" [ 1216.597325] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.605599] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655216, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.724290] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.724573] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.782330] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.806772] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6557acde-7460-449d-baad-c8c5b8bca746 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.815807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7fe139-f53c-407f-93ed-7ab5a1fab089 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.849498] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15edc48-b896-40e1-ab00-ec448a4a74e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.857374] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a26cbb-a682-4cf7-9194-bfacae0c9707 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1216.874408] env[62974]: DEBUG nova.compute.provider_tree [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.878971] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655214, 'name': ReconfigVM_Task, 'duration_secs': 0.132381} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.880084] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c066c78-a6fa-450a-8966-93f6bedfaffb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.898853] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1216.899105] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1216.899264] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1216.899445] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1216.899589] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1216.899774] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1216.899984] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1216.900160] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1216.900329] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1216.900488] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1216.900666] env[62974]: DEBUG nova.virt.hardware [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1216.901495] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-956b8951-151e-421c-b99c-e447ce5ccaa0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.907716] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1216.907716] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a96d48-08f8-d91b-abd8-8d4f1e50866a" [ 1216.907716] env[62974]: _type = "Task" [ 1216.907716] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.917963] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a96d48-08f8-d91b-abd8-8d4f1e50866a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.107257] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655216, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.184292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquiring lock "a94cb966-5304-4484-8639-899d7211e8b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.184564] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "a94cb966-5304-4484-8639-899d7211e8b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.184776] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquiring lock "a94cb966-5304-4484-8639-899d7211e8b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.184958] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "a94cb966-5304-4484-8639-899d7211e8b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.185153] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "a94cb966-5304-4484-8639-899d7211e8b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.187633] env[62974]: INFO nova.compute.manager [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Terminating instance [ 1217.381415] env[62974]: DEBUG nova.scheduler.client.report [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.419435] env[62974]: DEBUG oslo_vmware.api [None 
req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a96d48-08f8-d91b-abd8-8d4f1e50866a, 'name': SearchDatastore_Task, 'duration_secs': 0.011125} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.428926] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1217.429394] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-240d430e-4852-4c87-ac54-1301d65fb264 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.452865] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1217.452865] env[62974]: value = "task-2655217" [ 1217.452865] env[62974]: _type = "Task" [ 1217.452865] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.462258] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655217, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.509503] env[62974]: DEBUG nova.objects.instance [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'flavor' on Instance uuid 255a1d01-e007-45e5-a2c9-798223f41b30 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.608867] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655216, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.692491] env[62974]: DEBUG nova.compute.manager [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1217.692805] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1de43514-0a9a-4a6e-8813-7091a618df14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.703410] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3442074-343e-478e-a489-a0702685f01d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.733472] env[62974]: WARNING nova.virt.vmwareapi.driver [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance a94cb966-5304-4484-8639-899d7211e8b6 could not be found. [ 1217.733828] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1217.734231] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-504edbb7-c513-4eaa-8939-978a3ea68d94 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.745262] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a350d47-ee00-435d-86c5-2f7b4369d4fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.776387] env[62974]: WARNING nova.virt.vmwareapi.vmops [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a94cb966-5304-4484-8639-899d7211e8b6 could not be found. [ 1217.778051] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1217.778051] env[62974]: INFO nova.compute.manager [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1217.778051] env[62974]: DEBUG oslo.service.loopingcall [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1217.778051] env[62974]: DEBUG nova.compute.manager [-] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1217.778051] env[62974]: DEBUG nova.network.neutron [-] [instance: a94cb966-5304-4484-8639-899d7211e8b6] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1217.886676] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.162s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.887584] env[62974]: INFO nova.compute.manager [None req-8d104b3b-c6f8-49ee-bd2b-24aa532fbe74 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Successfully reverted task state from rebuilding on failure for instance. [ 1217.892339] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.110s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.892681] env[62974]: DEBUG nova.objects.instance [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'resources' on Instance uuid 007a5e28-7891-4327-ba39-bb9da8e32495 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.963714] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655217, 'name': ReconfigVM_Task, 'duration_secs': 0.322602} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.964048] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1217.964963] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c04dd7-e779-407c-95b9-a611209353e8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.989429] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1217.989770] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd109760-5422-499b-a7dd-e6fe611b5512 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.008651] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1218.008651] env[62974]: value = "task-2655218" [ 1218.008651] env[62974]: _type = "Task" [ 1218.008651] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.015093] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6069d2f-5fee-4792-997d-9efc407d384f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.247s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.019309] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655218, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.114907] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655216, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.348396] env[62974]: DEBUG oslo_concurrency.lockutils [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "255a1d01-e007-45e5-a2c9-798223f41b30" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.348673] env[62974]: DEBUG oslo_concurrency.lockutils [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.483656] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7502e744-5ebd-4037-a397-f99345f41bd2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.496063] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d9083f-747c-414f-a97d-d2c1bb445fc3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.535623] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0695db96-57f1-4236-9000-7b2e9b02e55d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.542687] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655218, 'name': ReconfigVM_Task, 'duration_secs': 0.366449} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.545221] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0/744a685d-845e-4818-abb5-c70056fd4cd0.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1218.546244] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b6528b-125f-4eda-8a92-3bd013665c74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.549873] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe19827-8b06-44e6-b830-3bfc809ab7bb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.581338] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e45e78-77af-4a58-adc6-51c2ae16d459 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.584684] env[62974]: DEBUG nova.compute.provider_tree [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.589749] env[62974]: DEBUG nova.compute.manager [req-412195da-5715-48dd-a5a0-2b452e0be812 req-eedaf18c-5731-45d0-a02f-30d6694e087b service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Received event network-vif-deleted-b7fa291b-8b45-466d-be06-2a15ad4d11e1 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1218.589942] env[62974]: INFO nova.compute.manager [req-412195da-5715-48dd-a5a0-2b452e0be812 req-eedaf18c-5731-45d0-a02f-30d6694e087b service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Neutron deleted interface b7fa291b-8b45-466d-be06-2a15ad4d11e1; detaching it from the instance and deleting it from the info cache [ 1218.590127] env[62974]: DEBUG nova.network.neutron [req-412195da-5715-48dd-a5a0-2b452e0be812 req-eedaf18c-5731-45d0-a02f-30d6694e087b service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.613593] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8174dd9e-5fa5-4863-a7ed-bf5811b5db50 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.635167] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655216, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.636775] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc12585e-00d6-4e13-a3a7-a35c1582ae35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.644415] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1218.644691] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be530dad-d95d-4612-b18d-de282e1c6fab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.651155] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1218.651155] env[62974]: value = "task-2655219" [ 1218.651155] env[62974]: _type = "Task" [ 1218.651155] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.663243] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655219, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.852806] env[62974]: INFO nova.compute.manager [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Detaching volume fc927034-a8e9-49ad-b9a6-de33584632a9 [ 1218.890686] env[62974]: INFO nova.virt.block_device [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Attempting to driver detach volume fc927034-a8e9-49ad-b9a6-de33584632a9 from mountpoint /dev/sdb [ 1218.890932] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Volume detach. 
Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1218.891144] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535511', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'name': 'volume-fc927034-a8e9-49ad-b9a6-de33584632a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '255a1d01-e007-45e5-a2c9-798223f41b30', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'serial': 'fc927034-a8e9-49ad-b9a6-de33584632a9'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1218.892094] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4ab8e3-78d2-4b43-b1f0-8bb5d0f9e01b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.917552] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582b73fb-6027-4d06-a752-f300f4ac1bd3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.925145] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e4eede-9f71-447e-b826-d068684e4ab6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.945881] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba1945d-60c0-43a8-bd10-ea06c0976490 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.963040] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] The volume has not been displaced from its original location: [datastore2] volume-fc927034-a8e9-49ad-b9a6-de33584632a9/volume-fc927034-a8e9-49ad-b9a6-de33584632a9.vmdk. No consolidation needed. 
{{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1218.968508] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1218.969020] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f0b3425-f420-4c58-a655-2abc4b9c5003 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.986285] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1218.986285] env[62974]: value = "task-2655220" [ 1218.986285] env[62974]: _type = "Task" [ 1218.986285] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.995764] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655220, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.091025] env[62974]: DEBUG nova.network.neutron [-] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.093545] env[62974]: DEBUG nova.scheduler.client.report [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1219.097329] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec05219b-46f0-41e1-8455-62ca6f425346 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.110744] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66d64b7-c23b-491e-a7a8-70c40119dea6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.135421] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655216, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.438833} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.135801] env[62974]: INFO nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c/OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c.vmdk to [datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe/ccd419f6-57a7-45a7-8f37-9936619bcffe.vmdk. [ 1219.136063] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Cleaning up location [datastore2] OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1219.136262] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_817df0db-8717-4cfc-8910-0f951efe253c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1219.144177] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ddec22f-26bd-45cc-9a43-86f1e66f5815 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.146911] env[62974]: DEBUG nova.compute.manager [req-412195da-5715-48dd-a5a0-2b452e0be812 req-eedaf18c-5731-45d0-a02f-30d6694e087b service nova] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Detach interface failed, port_id=b7fa291b-8b45-466d-be06-2a15ad4d11e1, reason: Instance a94cb966-5304-4484-8639-899d7211e8b6 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1219.151746] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1219.151746] env[62974]: value = "task-2655221" [ 1219.151746] env[62974]: _type = "Task" [ 1219.151746] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.164052] env[62974]: DEBUG oslo_vmware.api [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655219, 'name': PowerOnVM_Task, 'duration_secs': 0.448785} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.167392] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1219.170827] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.496941] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655220, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.597606] env[62974]: INFO nova.compute.manager [-] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Took 1.82 seconds to deallocate network for instance. [ 1219.598461] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.620748] env[62974]: INFO nova.scheduler.client.report [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted allocations for instance 007a5e28-7891-4327-ba39-bb9da8e32495 [ 1219.665516] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062554} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.665777] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1219.665941] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe/ccd419f6-57a7-45a7-8f37-9936619bcffe.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.666280] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe/ccd419f6-57a7-45a7-8f37-9936619bcffe.vmdk to [datastore2] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1219.666547] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51d892cf-9f69-4335-a1a9-a14518822e99 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.676663] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1219.676663] env[62974]: value = "task-2655222" [ 1219.676663] env[62974]: _type = "Task" [ 1219.676663] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.685078] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655222, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.999857] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655220, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.130647] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8f2e29be-3f47-4e2f-856b-26fbd693376c tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "007a5e28-7891-4327-ba39-bb9da8e32495" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.218s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.158883] env[62974]: INFO nova.compute.manager [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Took 0.56 seconds to detach 1 volumes for instance. [ 1220.161076] env[62974]: DEBUG nova.compute.manager [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Deleting volume: 787e16fc-37a4-4aee-b780-7e5c2733573d {{(pid=62974) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1220.179589] env[62974]: INFO nova.compute.manager [None req-8a93a5cb-d665-4cc7-b883-132a075e8571 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance to original state: 'active' [ 1220.193694] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655222, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.501867] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655220, 'name': ReconfigVM_Task, 'duration_secs': 1.440653} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.502196] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1220.507041] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3b945f4-f767-46e8-87ac-88e5b5011247 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.522554] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1220.522554] env[62974]: value = "task-2655224" [ 1220.522554] env[62974]: _type = "Task" [ 1220.522554] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.531785] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655224, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.694382] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655222, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.732563] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.732873] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.733088] env[62974]: DEBUG nova.objects.instance [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lazy-loading 'resources' on Instance uuid a94cb966-5304-4484-8639-899d7211e8b6 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.737236] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.737466] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.737733] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.739167] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 
tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.739167] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.739927] env[62974]: INFO nova.compute.manager [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Terminating instance [ 1221.033558] env[62974]: DEBUG oslo_vmware.api [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655224, 'name': ReconfigVM_Task, 'duration_secs': 0.2224} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.034028] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535511', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'name': 'volume-fc927034-a8e9-49ad-b9a6-de33584632a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '255a1d01-e007-45e5-a2c9-798223f41b30', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc927034-a8e9-49ad-b9a6-de33584632a9', 'serial': 'fc927034-a8e9-49ad-b9a6-de33584632a9'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1221.201943] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655222, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.246089] env[62974]: DEBUG nova.compute.manager [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.246406] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.247328] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19960e59-4b08-4dcc-a5b2-34278e4633c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.256132] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.256379] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f724e746-bd21-41f3-abd3-064aabcf5e20 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.262831] env[62974]: DEBUG oslo_vmware.api [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1221.262831] env[62974]: value = "task-2655225" [ 1221.262831] env[62974]: _type = "Task" [ 1221.262831] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.271979] env[62974]: DEBUG oslo_vmware.api [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655225, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.317439] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70357a93-7e37-4657-9ac0-f4e5c9e3a0ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.325316] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d81506-5170-433e-b89e-15d598ededde {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.358526] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "744a685d-845e-4818-abb5-c70056fd4cd0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.358797] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.359046] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.359249] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.359416] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.361990] env[62974]: INFO nova.compute.manager [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Terminating instance [ 1221.364072] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76518483-3e22-4585-bef1-fbba34ca26c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.373744] env[62974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fed7bdf-9c0e-4c8e-9dac-a151be7d9078 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.388243] env[62974]: DEBUG nova.compute.provider_tree [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.586413] env[62974]: DEBUG nova.objects.instance [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'flavor' on Instance uuid 255a1d01-e007-45e5-a2c9-798223f41b30 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1221.694579] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655222, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.774596] env[62974]: DEBUG oslo_vmware.api [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.797457] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.797781] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.869600] env[62974]: DEBUG nova.compute.manager [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.869865] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.871093] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a303e94-78ac-41d9-af68-3b48aee5da37 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.879838] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.880208] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3801f99b-5a25-4b98-9c09-f99da5cd9b2e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.887381] env[62974]: DEBUG oslo_vmware.api [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1221.887381] env[62974]: value = "task-2655226" [ 1221.887381] env[62974]: _type = "Task" [ 1221.887381] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.891442] env[62974]: DEBUG nova.scheduler.client.report [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1221.902614] env[62974]: DEBUG oslo_vmware.api [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.195425] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655222, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.382667} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.195698] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ccd419f6-57a7-45a7-8f37-9936619bcffe/ccd419f6-57a7-45a7-8f37-9936619bcffe.vmdk to [datastore2] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1222.196498] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a415ba-6f8e-4e83-923e-413fafd79b65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.221726] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.222044] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7825960c-6900-4231-9409-e87d2371c52a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.241520] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1222.241520] env[62974]: value = "task-2655227" [ 1222.241520] env[62974]: _type = "Task" [ 1222.241520] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.250606] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655227, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.276952] env[62974]: DEBUG oslo_vmware.api [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655225, 'name': PowerOffVM_Task, 'duration_secs': 0.669091} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.281348] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.281559] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.281845] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16097d8e-1026-42e2-910b-76aef5193c6f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.307022] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.307022] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1222.307022] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Rebuilding the list of instances to heal {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1222.398113] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.400063] env[62974]: DEBUG oslo_vmware.api [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655226, 'name': PowerOffVM_Task, 'duration_secs': 0.28249} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.401028] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.401028] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.401028] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36c9c50d-f119-46f8-b59d-a40281fb76d4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.594426] env[62974]: DEBUG oslo_concurrency.lockutils [None req-643c0f0e-33f5-4839-9c68-61f94bfd5781 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.246s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.644468] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.644784] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.644884] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleting the datastore file [datastore1] d7ca15a3-edd2-48a2-9ee0-5d2072f1310a {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.646065] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd83167e-f9ae-4642-8d12-6a9fdc8e6a5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.647976] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.648179] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Deleting 
contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.648350] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleting the datastore file [datastore1] 744a685d-845e-4818-abb5-c70056fd4cd0 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.648924] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68dc1f7f-c91b-479a-9b68-010063da5f53 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.655478] env[62974]: DEBUG oslo_vmware.api [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for the task: (returnval){ [ 1222.655478] env[62974]: value = "task-2655230" [ 1222.655478] env[62974]: _type = "Task" [ 1222.655478] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.656814] env[62974]: DEBUG oslo_vmware.api [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1222.656814] env[62974]: value = "task-2655231" [ 1222.656814] env[62974]: _type = "Task" [ 1222.656814] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.667854] env[62974]: DEBUG oslo_vmware.api [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655231, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.670630] env[62974]: DEBUG oslo_vmware.api [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.753808] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655227, 'name': ReconfigVM_Task, 'duration_secs': 0.285044} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.753808] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 521b463f-98f9-4365-b446-5de9af79f220/521b463f-98f9-4365-b446-5de9af79f220.vmdk or device None with type streamOptimized {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1222.753808] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad48e79c-f8a4-42ff-9160-fc9eb919600f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.760984] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1222.760984] env[62974]: value = "task-2655232" [ 1222.760984] env[62974]: _type = "Task" [ 1222.760984] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.768699] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655232, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.809687] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Skipping network cache update for instance because it is being deleted. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 1222.809969] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Skipping network cache update for instance because it is being deleted. 
{{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 1222.810447] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.810648] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.810889] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1222.811128] env[62974]: DEBUG nova.objects.instance [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lazy-loading 'info_cache' on Instance uuid 521b463f-98f9-4365-b446-5de9af79f220 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1222.919946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9b836426-6768-4596-b4bd-43a50be27e40 tempest-ServerActionsV293TestJSON-1688415250 tempest-ServerActionsV293TestJSON-1688415250-project-member] Lock "a94cb966-5304-4484-8639-899d7211e8b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.735s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.178707] env[62974]: DEBUG oslo_vmware.api [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Task: {'id': task-2655230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192425} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.178994] env[62974]: DEBUG oslo_vmware.api [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182859} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.179491] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.179797] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.180110] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.180246] env[62974]: INFO nova.compute.manager [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Took 1.93 seconds to destroy the instance on the hypervisor. [ 1223.180808] env[62974]: DEBUG oslo.service.loopingcall [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1223.180808] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.180988] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.181157] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.181366] env[62974]: INFO nova.compute.manager [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Took 1.31 seconds to destroy the instance on the hypervisor. 
[ 1223.181612] env[62974]: DEBUG oslo.service.loopingcall [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1223.182131] env[62974]: DEBUG nova.compute.manager [-] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.182253] env[62974]: DEBUG nova.network.neutron [-] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1223.184106] env[62974]: DEBUG nova.compute.manager [-] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.184231] env[62974]: DEBUG nova.network.neutron [-] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1223.271401] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655232, 'name': Rename_Task, 'duration_secs': 0.136514} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.271680] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1223.271925] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-282f7264-fc24-4bc8-8521-b3d231a38ed4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.280277] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1223.280277] env[62974]: value = "task-2655233" [ 1223.280277] env[62974]: _type = "Task" [ 1223.280277] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.289826] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655233, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.592711] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "9450a3f2-4b2b-4022-842f-f24a8c470098" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.592940] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.620713] env[62974]: DEBUG nova.compute.manager [req-76b04dc5-f455-40fc-a102-a2dbb3e2c781 req-8ccf5185-7d3e-4eb5-880c-e6182d5c2de8 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Received event network-vif-deleted-3130f1da-8f58-4210-ac5f-966ca6592a53 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1223.620936] env[62974]: INFO nova.compute.manager [req-76b04dc5-f455-40fc-a102-a2dbb3e2c781 req-8ccf5185-7d3e-4eb5-880c-e6182d5c2de8 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Neutron deleted interface 3130f1da-8f58-4210-ac5f-966ca6592a53; detaching it from the instance and deleting it from the info cache [ 1223.621118] env[62974]: DEBUG nova.network.neutron [req-76b04dc5-f455-40fc-a102-a2dbb3e2c781 req-8ccf5185-7d3e-4eb5-880c-e6182d5c2de8 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.680292] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "255a1d01-e007-45e5-a2c9-798223f41b30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.680598] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.680819] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "255a1d01-e007-45e5-a2c9-798223f41b30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.681014] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.681194] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.684872] env[62974]: INFO nova.compute.manager [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Terminating instance [ 1223.791243] env[62974]: DEBUG oslo_vmware.api [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655233, 'name': PowerOnVM_Task, 'duration_secs': 0.459995} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.791549] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1223.961410] env[62974]: DEBUG nova.compute.manager [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1223.962529] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a149ad32-ef26-49d1-9832-8fbcc934b0d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.099822] env[62974]: DEBUG nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1224.103873] env[62974]: DEBUG nova.network.neutron [-] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.124910] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-193433dd-f603-4f34-860c-f9ec57f94657 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.136203] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618cfb43-a6b1-4004-8c97-95d803c57f19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.163457] env[62974]: DEBUG nova.compute.manager [req-76b04dc5-f455-40fc-a102-a2dbb3e2c781 req-8ccf5185-7d3e-4eb5-880c-e6182d5c2de8 service nova] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Detach interface failed, port_id=3130f1da-8f58-4210-ac5f-966ca6592a53, reason: Instance d7ca15a3-edd2-48a2-9ee0-5d2072f1310a could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1224.190451] env[62974]: DEBUG nova.compute.manager [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1224.190451] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1224.190451] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d54b3e7-40f9-4bce-a6e9-9fcbbf788c35 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.196913] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1224.197145] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2a015b8-6f83-4773-9bb4-f763b522ef1c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.203347] env[62974]: DEBUG oslo_vmware.api [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1224.203347] env[62974]: value = "task-2655234" [ 1224.203347] env[62974]: _type = "Task" [ 1224.203347] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.210912] env[62974]: DEBUG oslo_vmware.api [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655234, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.286404] env[62974]: DEBUG nova.network.neutron [-] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.484774] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8b7ca38b-7abc-42cf-9e9f-8d5634d1ad33 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.034s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.536367] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [{"id": "6214f8c1-1172-4dbd-b021-d468e0b04110", "address": "fa:16:3e:1c:58:ff", "network": {"id": "71d8a920-c482-4b45-9403-2d49b6196641", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1406222692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ecf0c1b56e34a6cbc2d073089e37efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6214f8c1-11", "ovs_interfaceid": "6214f8c1-1172-4dbd-b021-d468e0b04110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.606879] env[62974]: INFO nova.compute.manager [-] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Took 1.42 seconds to deallocate network for instance. 
[ 1224.621774] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.622146] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.623731] env[62974]: INFO nova.compute.claims [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1224.713267] env[62974]: DEBUG oslo_vmware.api [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655234, 'name': PowerOffVM_Task, 'duration_secs': 0.425053} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.713547] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1224.713730] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1224.713978] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c23825b-8d4a-4729-8411-fee6e93e5c5f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.788658] env[62974]: INFO nova.compute.manager [-] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Took 1.60 seconds to deallocate network for instance. 
[ 1224.807555] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1224.807931] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1224.807931] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleting the datastore file [datastore2] 255a1d01-e007-45e5-a2c9-798223f41b30 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.808217] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db2177e9-594d-4c87-b097-248765c37281 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.814497] env[62974]: DEBUG oslo_vmware.api [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1224.814497] env[62974]: value = "task-2655236" [ 1224.814497] env[62974]: _type = "Task" [ 1224.814497] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.822573] env[62974]: DEBUG oslo_vmware.api [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655236, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.039020] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-521b463f-98f9-4365-b446-5de9af79f220" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.039218] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1225.039429] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.039605] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.039757] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.039904] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.040103] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.040226] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.040355] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1225.040542] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.115419] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.295377] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.324233] env[62974]: DEBUG oslo_vmware.api [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183903} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.324489] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1225.324672] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1225.324846] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1225.325023] env[62974]: INFO nova.compute.manager [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1225.325265] env[62974]: DEBUG oslo.service.loopingcall [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1225.325451] env[62974]: DEBUG nova.compute.manager [-] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1225.325545] env[62974]: DEBUG nova.network.neutron [-] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1225.544147] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.646891] env[62974]: DEBUG nova.compute.manager [req-609d87a1-77ca-4997-98c0-c10bff2973bf req-066ddb9d-c85c-40ec-b638-a3e387b3c7c2 service nova] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Received event network-vif-deleted-b715d91d-19dc-4ecd-9d75-e57c620d897a {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1225.707458] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74de171-1d81-4b39-8003-9dfd6033ec30 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.716807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b16d59-d44d-4497-80e7-dc1b73b13e7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.750089] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccca637-2ea3-4452-8798-8a77978584fc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.757622] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654ed480-6780-4d46-aae4-3fc13f705fb0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.773296] env[62974]: DEBUG nova.compute.provider_tree [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.789320] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.789618] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.789877] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "521b463f-98f9-4365-b446-5de9af79f220-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.790096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.790968] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.792185] env[62974]: INFO nova.compute.manager [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Terminating instance [ 1226.249633] env[62974]: DEBUG nova.network.neutron [-] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.277073] env[62974]: DEBUG nova.scheduler.client.report [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.297501] env[62974]: DEBUG nova.compute.manager [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1226.297501] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1226.298717] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b7e99b-4e9c-49fc-a4c0-7b4665ee67e5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.310538] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1226.310978] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78513797-dcec-4fc5-9a17-98f67db321f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.320905] env[62974]: DEBUG oslo_vmware.api [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1226.320905] env[62974]: value = "task-2655237" [ 1226.320905] env[62974]: _type = "Task" [ 1226.320905] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.333052] env[62974]: DEBUG oslo_vmware.api [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.753996] env[62974]: INFO nova.compute.manager [-] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Took 1.43 seconds to deallocate network for instance. [ 1226.783514] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.161s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.784194] env[62974]: DEBUG nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1226.786761] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.671s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.786987] env[62974]: DEBUG nova.objects.instance [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lazy-loading 'resources' on Instance uuid d7ca15a3-edd2-48a2-9ee0-5d2072f1310a {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.833252] env[62974]: DEBUG oslo_vmware.api [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655237, 'name': PowerOffVM_Task, 'duration_secs': 0.218274} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.833618] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1226.833859] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1226.834525] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f30be399-6bd4-46d7-b627-bbea22a518f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.901453] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1226.901652] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1226.901838] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleting the datastore file [datastore2] 521b463f-98f9-4365-b446-5de9af79f220 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1226.902122] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7c79279-4356-4b3e-8661-12a969752522 
{{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.908871] env[62974]: DEBUG oslo_vmware.api [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for the task: (returnval){ [ 1226.908871] env[62974]: value = "task-2655239" [ 1226.908871] env[62974]: _type = "Task" [ 1226.908871] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.916827] env[62974]: DEBUG oslo_vmware.api [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655239, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.261105] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.289628] env[62974]: DEBUG nova.compute.utils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1227.294088] env[62974]: DEBUG nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1227.294329] env[62974]: DEBUG nova.network.neutron [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1227.340539] env[62974]: DEBUG nova.policy [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d7f657874c84dd59bdffc71d827c851', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '465d89c041344f449b8f4d37d28a9804', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1227.370623] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d0901d-e4cb-4663-b12c-e90f45d625af {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.378641] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71607e02-f878-4143-b970-36661edf6907 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.408799] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e91f42-812b-442c-9d98-6c81e33c8492 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.422122] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157db008-a5c8-4183-98bb-4edc6b8b1806 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.425941] env[62974]: DEBUG oslo_vmware.api [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Task: {'id': task-2655239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214298} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.426209] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1227.426397] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1227.426566] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1227.426738] env[62974]: INFO nova.compute.manager [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1227.426976] env[62974]: DEBUG oslo.service.loopingcall [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.427616] env[62974]: DEBUG nova.compute.manager [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1227.427691] env[62974]: DEBUG nova.network.neutron [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1227.438568] env[62974]: DEBUG nova.compute.provider_tree [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.674139] env[62974]: DEBUG nova.compute.manager [req-909b2368-4ca3-487f-b849-f49050fc6590 req-ea44e071-4e18-41f5-9af8-3fa0a80128ef service nova] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Received event network-vif-deleted-c013e4f0-4d15-4230-bcb4-15cbadf79757 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1227.748989] env[62974]: DEBUG nova.network.neutron [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Successfully created port: 3e3d7b6a-32ca-4958-ac57-63fd0e6971aa {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1227.795140] env[62974]: DEBUG nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1227.944537] env[62974]: DEBUG nova.scheduler.client.report [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.456696] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.670s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.458969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.164s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.459180] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.460952] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.917s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.461138] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.461291] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1228.461567] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.201s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.462172] env[62974]: DEBUG nova.objects.instance [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'resources' on Instance uuid 255a1d01-e007-45e5-a2c9-798223f41b30 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.463850] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5f3b0b-545c-4a55-829b-40597265b2aa {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.472722] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f7aa66-45cb-4730-8ace-e86f75aaea7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.487538] env[62974]: INFO nova.scheduler.client.report [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted allocations for instance 744a685d-845e-4818-abb5-c70056fd4cd0 [ 1228.489029] env[62974]: INFO nova.scheduler.client.report [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Deleted allocations for instance d7ca15a3-edd2-48a2-9ee0-5d2072f1310a [ 1228.490391] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70d927d-9aac-4371-a0b3-3188a062a7a3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.501752] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3375454a-fa5c-4273-85e8-a6e8679627a1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.533394] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180565MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1228.533394] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.626062] env[62974]: DEBUG nova.network.neutron [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.804343] env[62974]: DEBUG nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1228.829575] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1228.829842] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.829998] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1228.830194] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.830342] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1228.830488] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1228.830717] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1228.830888] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1228.831109] env[62974]: DEBUG 
nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1228.831282] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1228.831454] env[62974]: DEBUG nova.virt.hardware [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1228.832318] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e7077b-7783-433c-812f-5ebbe530c1a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.840426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ccde71-4be1-4e16-886a-bd9254dba84d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.002021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-4943c1ba-c098-4ee9-b4d9-c01dc4ee1eaa tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "744a685d-845e-4818-abb5-c70056fd4cd0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.643s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.004981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b9e2cfa1-d1b8-4fc0-9f14-0b9bf17df727 tempest-AttachInterfacesTestJSON-1102515726 tempest-AttachInterfacesTestJSON-1102515726-project-member] Lock "d7ca15a3-edd2-48a2-9ee0-5d2072f1310a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.267s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.017436] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304bbc80-1549-4e45-a0a8-2ecc4d4f74ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.026053] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b5f6f5-4953-489a-86be-682441d46b60 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.055737] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab89d055-c738-4df6-8653-64ea32aacc3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.062945] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8973dc9-a1c7-48d6-b4b0-91735befbbe3 {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.077687] env[62974]: DEBUG nova.compute.provider_tree [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.128232] env[62974]: INFO nova.compute.manager [-] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Took 1.70 seconds to deallocate network for instance. [ 1229.366350] env[62974]: DEBUG nova.network.neutron [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Successfully updated port: 3e3d7b6a-32ca-4958-ac57-63fd0e6971aa {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.581185] env[62974]: DEBUG nova.scheduler.client.report [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1229.635881] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.713633] env[62974]: DEBUG nova.compute.manager [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Received event network-vif-deleted-6214f8c1-1172-4dbd-b021-d468e0b04110 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1229.713842] env[62974]: DEBUG nova.compute.manager [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Received event network-vif-plugged-3e3d7b6a-32ca-4958-ac57-63fd0e6971aa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1229.714042] env[62974]: DEBUG oslo_concurrency.lockutils [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] Acquiring lock "9450a3f2-4b2b-4022-842f-f24a8c470098-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.714252] env[62974]: DEBUG oslo_concurrency.lockutils [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.714418] env[62974]: DEBUG oslo_concurrency.lockutils [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.714579] env[62974]: DEBUG nova.compute.manager [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] No waiting events found dispatching network-vif-plugged-3e3d7b6a-32ca-4958-ac57-63fd0e6971aa {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1229.714742] env[62974]: WARNING nova.compute.manager [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Received unexpected event network-vif-plugged-3e3d7b6a-32ca-4958-ac57-63fd0e6971aa for instance with vm_state building and task_state spawning. [ 1229.714896] env[62974]: DEBUG nova.compute.manager [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Received event network-changed-3e3d7b6a-32ca-4958-ac57-63fd0e6971aa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1229.715063] env[62974]: DEBUG nova.compute.manager [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Refreshing instance network info cache due to event network-changed-3e3d7b6a-32ca-4958-ac57-63fd0e6971aa. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1229.716052] env[62974]: DEBUG oslo_concurrency.lockutils [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] Acquiring lock "refresh_cache-9450a3f2-4b2b-4022-842f-f24a8c470098" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.716052] env[62974]: DEBUG oslo_concurrency.lockutils [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] Acquired lock "refresh_cache-9450a3f2-4b2b-4022-842f-f24a8c470098" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.716052] env[62974]: DEBUG nova.network.neutron [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Refreshing network info cache for port 3e3d7b6a-32ca-4958-ac57-63fd0e6971aa {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1229.874860] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "refresh_cache-9450a3f2-4b2b-4022-842f-f24a8c470098" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.086321] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.088728] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.556s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.126440] env[62974]: INFO nova.scheduler.client.report [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted allocations for instance 255a1d01-e007-45e5-a2c9-798223f41b30 [ 1230.249816] env[62974]: DEBUG nova.network.neutron [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1230.317952] env[62974]: DEBUG nova.network.neutron [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.633935] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a8953f78-d202-44cb-b76a-167758e7490f tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "255a1d01-e007-45e5-a2c9-798223f41b30" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.953s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.721505] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.721617] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.825016] env[62974]: DEBUG oslo_concurrency.lockutils [req-45c6e2cc-3f7b-4c5f-a3b3-a16510fbe981 req-72faf4db-e2a7-46c3-b9cd-0c6182696bb4 service nova] Releasing lock "refresh_cache-9450a3f2-4b2b-4022-842f-f24a8c470098" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1230.825243] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquired lock "refresh_cache-9450a3f2-4b2b-4022-842f-f24a8c470098" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.825398] env[62974]: DEBUG nova.network.neutron [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1231.114295] env[62974]: WARNING nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 521b463f-98f9-4365-b446-5de9af79f220 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1231.114525] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 9450a3f2-4b2b-4022-842f-f24a8c470098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.224626] env[62974]: DEBUG nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1231.373953] env[62974]: DEBUG nova.network.neutron [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1231.541910] env[62974]: DEBUG nova.network.neutron [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Updating instance_info_cache with network_info: [{"id": "3e3d7b6a-32ca-4958-ac57-63fd0e6971aa", "address": "fa:16:3e:09:9d:a8", "network": {"id": "babe88e2-913e-4c37-8c0e-7121541a3b12", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-973293995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "465d89c041344f449b8f4d37d28a9804", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7869cc8e-e58f-4fd6-88d7-85a18e43cd3a", "external-id": "nsx-vlan-transportzone-927", "segmentation_id": 927, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e3d7b6a-32", "ovs_interfaceid": "3e3d7b6a-32ca-4958-ac57-63fd0e6971aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.617813] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.618048] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1231.618201] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1231.674315] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e91842-0989-4373-9429-5a3148bf7ed1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.681867] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e51eab-7444-4654-985f-ac6071aa3e08 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.717623] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f443d77-c1bd-4f22-9f13-3002942e5038 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.725064] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a949a58-de31-42fd-88af-ecde5154fdf9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.742163] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.744730] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.045429] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Releasing lock "refresh_cache-9450a3f2-4b2b-4022-842f-f24a8c470098" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.045553] env[62974]: DEBUG nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Instance network_info: |[{"id": "3e3d7b6a-32ca-4958-ac57-63fd0e6971aa", "address": "fa:16:3e:09:9d:a8", "network": {"id": "babe88e2-913e-4c37-8c0e-7121541a3b12", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-973293995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "465d89c041344f449b8f4d37d28a9804", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7869cc8e-e58f-4fd6-88d7-85a18e43cd3a", "external-id": "nsx-vlan-transportzone-927", "segmentation_id": 927, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e3d7b6a-32", "ovs_interfaceid": "3e3d7b6a-32ca-4958-ac57-63fd0e6971aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1232.045958] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:9d:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7869cc8e-e58f-4fd6-88d7-85a18e43cd3a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e3d7b6a-32ca-4958-ac57-63fd0e6971aa', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1232.053586] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Creating folder: Project (465d89c041344f449b8f4d37d28a9804). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1232.053893] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2c91f9f-a552-4711-b114-74ec81aae4b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.064686] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Created folder: Project (465d89c041344f449b8f4d37d28a9804) in parent group-v535199. [ 1232.064885] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Creating folder: Instances. Parent ref: group-v535514. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1232.065128] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75490d1b-7301-4133-ae4a-e4c6f2cd6a6c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.073750] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Created folder: Instances in parent group-v535514. 
[ 1232.073986] env[62974]: DEBUG oslo.service.loopingcall [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1232.074187] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1232.074386] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00fde9ab-37bf-4c10-9a60-e36db3d01184 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.092321] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1232.092321] env[62974]: value = "task-2655242" [ 1232.092321] env[62974]: _type = "Task" [ 1232.092321] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.100166] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655242, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.248025] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1232.602715] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655242, 'name': CreateVM_Task, 'duration_secs': 0.360931} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.602947] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1232.603690] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.603885] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.604227] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1232.604481] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cc9539e-c23e-469b-a5da-8c97fb0bd301 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.609450] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1232.609450] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e5e070-9348-e129-6d63-e9a5585fd590" [ 1232.609450] env[62974]: _type = "Task" [ 1232.609450] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.617966] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e5e070-9348-e129-6d63-e9a5585fd590, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.750119] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1232.750468] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.662s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.750587] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.115s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.750769] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.752977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.008s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.754845] env[62974]: INFO nova.compute.claims [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1232.776933] env[62974]: INFO nova.scheduler.client.report [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Deleted allocations for instance 521b463f-98f9-4365-b446-5de9af79f220 [ 1233.013412] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.013412] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.120076] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e5e070-9348-e129-6d63-e9a5585fd590, 'name': SearchDatastore_Task, 'duration_secs': 0.011063} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.120627] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.121105] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1233.121482] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.121775] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.122112] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1233.122571] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee57384f-ae8b-4965-a219-4ae420130569 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.132031] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1233.132148] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1233.133507] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbd20ad2-bc17-45d5-b784-ea45077dafa5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.139482] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1233.139482] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2d1f2-f348-bac3-2475-4a50ce8a3b0d" [ 1233.139482] env[62974]: _type = "Task" [ 1233.139482] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.148061] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2d1f2-f348-bac3-2475-4a50ce8a3b0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.291871] env[62974]: DEBUG oslo_concurrency.lockutils [None req-dbb0c54d-a2c3-4da4-ae54-e074d0c532a8 tempest-ServerActionsTestOtherB-1675782520 tempest-ServerActionsTestOtherB-1675782520-project-member] Lock "521b463f-98f9-4365-b446-5de9af79f220" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.502s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.516014] env[62974]: DEBUG nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1233.650465] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c2d1f2-f348-bac3-2475-4a50ce8a3b0d, 'name': SearchDatastore_Task, 'duration_secs': 0.010285} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.651348] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1402d83a-7fe0-44f7-8267-5c7808b648ad {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.657464] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1233.657464] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bf7d85-c25f-8ea9-ed7b-9363b3e4a64c" [ 1233.657464] env[62974]: _type = "Task" [ 1233.657464] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.666142] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bf7d85-c25f-8ea9-ed7b-9363b3e4a64c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.819403] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e4b7d7-4277-4664-8bd8-252973344328 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.827921] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e817adb6-f7f3-4167-adc0-224b74198187 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.867372] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07351fba-49bf-4f6d-a81c-58c7b232b78b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.879376] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da39efb9-a809-476e-8343-2f0847330d34 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.892425] env[62974]: DEBUG nova.compute.provider_tree [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.044216] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.167678] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52bf7d85-c25f-8ea9-ed7b-9363b3e4a64c, 'name': SearchDatastore_Task, 'duration_secs': 0.010066} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.167946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.168220] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 9450a3f2-4b2b-4022-842f-f24a8c470098/9450a3f2-4b2b-4022-842f-f24a8c470098.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1234.168476] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a54819bf-a545-465c-b3ac-bf11fb70af9a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.175530] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1234.175530] env[62974]: value = "task-2655244" [ 1234.175530] env[62974]: _type = "Task" [ 1234.175530] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.183090] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655244, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.397337] env[62974]: DEBUG nova.scheduler.client.report [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1234.685724] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655244, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44947} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.685948] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 9450a3f2-4b2b-4022-842f-f24a8c470098/9450a3f2-4b2b-4022-842f-f24a8c470098.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1234.686180] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1234.686428] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49c25342-0eed-4945-94e9-0a1232584241 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.693343] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1234.693343] env[62974]: value = "task-2655245" [ 1234.693343] env[62974]: _type = "Task" [ 1234.693343] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.700866] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655245, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.902137] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.149s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.902838] env[62974]: DEBUG nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1234.905876] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.864s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.907335] env[62974]: INFO nova.compute.claims [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1234.950569] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "01d0c91c-1724-453c-8d83-8f9e77afcef1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.950969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.202546] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655245, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.163101} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.203023] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1235.203672] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf8e8c4-bb54-4e25-90f4-4973fc9c5990 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.226817] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 9450a3f2-4b2b-4022-842f-f24a8c470098/9450a3f2-4b2b-4022-842f-f24a8c470098.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1235.227410] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-515250e1-56e0-426b-8014-f38e7d2cb122 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.247657] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1235.247657] env[62974]: value = "task-2655246" [ 1235.247657] env[62974]: _type = "Task" [ 1235.247657] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.257659] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655246, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.412867] env[62974]: DEBUG nova.compute.utils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1235.418755] env[62974]: DEBUG nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1235.418755] env[62974]: DEBUG nova.network.neutron [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1235.453210] env[62974]: DEBUG nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1235.461935] env[62974]: DEBUG nova.policy [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b837770f3f74a5fad99c7cc150e9cde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '567f64e735384503b6c0172050bdfaf5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1235.759144] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655246, 'name': ReconfigVM_Task, 'duration_secs': 0.286088} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.759462] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 9450a3f2-4b2b-4022-842f-f24a8c470098/9450a3f2-4b2b-4022-842f-f24a8c470098.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1235.760323] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12f831e3-70cd-472a-a058-cf3507714ba2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.767298] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1235.767298] env[62974]: value = "task-2655247" [ 1235.767298] env[62974]: _type = "Task" [ 1235.767298] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.779378] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655247, 'name': Rename_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.917094] env[62974]: DEBUG nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1235.935110] env[62974]: DEBUG nova.network.neutron [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Successfully created port: c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1235.974972] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.995800] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bcb9fd-7683-45e6-94a3-0821780cb6a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.003354] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0f265b-39a5-4e20-ae4c-4d962b679a43 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.034474] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f22f09d-7841-4ddf-bdb5-3faae4aa1c6e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.041882] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce978e2-44f9-48b6-8bfb-efea5d8dc12d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.054968] env[62974]: DEBUG nova.compute.provider_tree [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.281088] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655247, 'name': Rename_Task, 'duration_secs': 0.160871} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.281379] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1236.281629] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84cf076d-6bcb-4af0-8760-2133dfddc5c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.288497] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1236.288497] env[62974]: value = "task-2655248" [ 1236.288497] env[62974]: _type = "Task" [ 1236.288497] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.296383] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655248, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.558108] env[62974]: DEBUG nova.scheduler.client.report [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1236.798155] env[62974]: DEBUG oslo_vmware.api [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655248, 'name': PowerOnVM_Task, 'duration_secs': 0.439915} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.798435] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1236.798659] env[62974]: INFO nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Took 7.99 seconds to spawn the instance on the hypervisor. 
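The records above trace the vmwareapi spawn path for instance 9450a3f2-4b2b-4022-842f-f24a8c470098: each vCenter SOAP call (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task object that oslo.vmware polls until completion, which is what produces the repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines. The following is a minimal sketch of that invoke-and-poll pattern, not Nova's actual code; the endpoint, credentials and managed-object reference are placeholders.

from oslo_vmware import api, vim_util

# Placeholder vCenter endpoint and credentials (a real deployment reads these
# from configuration).
session = api.VMwareAPISession(
    'vc1.example.test', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Invoke one of the SOAP methods seen in the log and block until the returned
# vCenter task finishes; wait_for_task() is what emits the wait_for_task /
# _poll_task DEBUG records above.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)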
[ 1236.798865] env[62974]: DEBUG nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1236.799651] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1750ae72-6d6b-4617-9985-626d0fcdcc89 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.929563] env[62974]: DEBUG nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1236.956696] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1236.956972] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.957156] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.957343] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.957491] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1236.957637] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1236.957886] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1236.958016] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1236.958193] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1236.958352] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1236.958540] env[62974]: DEBUG nova.virt.hardware [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1236.959436] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa63e756-4344-441a-8fe8-43252ee7ac66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.967484] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc8176f-30c3-4a65-8c9b-fcb47be28200 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.063024] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.157s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.063558] env[62974]: DEBUG nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1237.066911] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.093s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.068732] env[62974]: INFO nova.compute.claims [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1237.317751] env[62974]: INFO nova.compute.manager [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Took 12.72 seconds to build instance. [ 1237.491445] env[62974]: DEBUG nova.compute.manager [req-cba555d1-2397-4d89-be1e-dd1a65571e63 req-8034899e-4c70-48fb-b9bd-82de74cde864 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Received event network-vif-plugged-c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1237.491445] env[62974]: DEBUG oslo_concurrency.lockutils [req-cba555d1-2397-4d89-be1e-dd1a65571e63 req-8034899e-4c70-48fb-b9bd-82de74cde864 service nova] Acquiring lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.491445] env[62974]: DEBUG oslo_concurrency.lockutils [req-cba555d1-2397-4d89-be1e-dd1a65571e63 req-8034899e-4c70-48fb-b9bd-82de74cde864 service nova] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.491445] env[62974]: DEBUG oslo_concurrency.lockutils [req-cba555d1-2397-4d89-be1e-dd1a65571e63 req-8034899e-4c70-48fb-b9bd-82de74cde864 service nova] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.491445] env[62974]: DEBUG nova.compute.manager [req-cba555d1-2397-4d89-be1e-dd1a65571e63 req-8034899e-4c70-48fb-b9bd-82de74cde864 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] No waiting events found dispatching network-vif-plugged-c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1237.493765] env[62974]: WARNING nova.compute.manager [req-cba555d1-2397-4d89-be1e-dd1a65571e63 req-8034899e-4c70-48fb-b9bd-82de74cde864 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Received unexpected event network-vif-plugged-c5072b58-30b6-47d8-ab41-30ea057f6478 for instance with vm_state building and task_state spawning. 
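The "Acquiring lock" / "acquired by" / '"released" by' DEBUG lines throughout this section, such as the compute_resources claims and the per-instance "-events" lock just above, are emitted by oslo.concurrency's lockutils helpers. Below is an illustrative sketch of the two usual forms, decorator and context manager; the function is hypothetical and the lock names simply mirror the log.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Critical section guarded by the in-process "compute_resources" lock,
    # mirroring the ResourceTracker lock messages above.
    pass

# The same machinery used as a context manager, e.g. the per-instance
# "<uuid>-events" lock held while popping external events.
with lockutils.lock('21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be-events'):
    pass

claim_resources()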
[ 1237.568680] env[62974]: DEBUG nova.compute.utils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1237.574380] env[62974]: DEBUG nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1237.574556] env[62974]: DEBUG nova.network.neutron [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1237.592807] env[62974]: DEBUG nova.network.neutron [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Successfully updated port: c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.626679] env[62974]: DEBUG nova.policy [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c69e5ea97264d57978ddcb94ef4bc41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43dc876c8a2346c7bca249407fb7fed8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1237.820233] env[62974]: DEBUG oslo_concurrency.lockutils [None req-17053423-e719-4f6e-9db7-1d46fe9db472 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.227s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.890038] env[62974]: DEBUG nova.network.neutron [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Successfully created port: 5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1238.085844] env[62974]: DEBUG nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1238.095628] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.095861] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.096289] env[62974]: DEBUG nova.network.neutron [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1238.160892] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0ccd07-0d86-4a98-a41e-9dc6a06d4651 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.169089] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174be7da-7b5c-41b5-91c3-d8fac792c66a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.198565] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6c5a54-1ee1-43f8-94a6-9563c079908e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.205966] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a9001c-2ffe-48b3-a3e8-46da964e327f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.220300] env[62974]: DEBUG nova.compute.provider_tree [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.627606] env[62974]: DEBUG nova.network.neutron [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1238.723550] env[62974]: DEBUG nova.scheduler.client.report [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.746329] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "9450a3f2-4b2b-4022-842f-f24a8c470098" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.746566] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.746772] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "9450a3f2-4b2b-4022-842f-f24a8c470098-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.746954] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.747134] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.748989] env[62974]: INFO nova.compute.manager [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Terminating instance [ 1238.756790] env[62974]: DEBUG nova.network.neutron [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e 
tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updating instance_info_cache with network_info: [{"id": "c5072b58-30b6-47d8-ab41-30ea057f6478", "address": "fa:16:3e:74:ad:aa", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5072b58-30", "ovs_interfaceid": "c5072b58-30b6-47d8-ab41-30ea057f6478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.099523] env[62974]: DEBUG nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Start spawning the instance on the hypervisor. {{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1239.127546] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1239.127795] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1239.127961] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1239.128167] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1239.128317] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1239.128462] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1239.128669] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1239.128833] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1239.129007] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1239.129214] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1239.129391] env[62974]: DEBUG nova.virt.hardware [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1239.130267] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30f71a0-7275-4deb-ac78-48e5a3f0b696 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.139103] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251a696b-e37c-4b5e-8801-8794ec94c470 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.230809] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 
tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.164s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.231414] env[62974]: DEBUG nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1239.252546] env[62974]: DEBUG nova.compute.manager [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1239.252799] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1239.253902] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f94aa4-5cb7-4bc3-9e22-d67b875ada2d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.258990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.259344] env[62974]: DEBUG nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Instance network_info: |[{"id": "c5072b58-30b6-47d8-ab41-30ea057f6478", "address": "fa:16:3e:74:ad:aa", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5072b58-30", "ovs_interfaceid": "c5072b58-30b6-47d8-ab41-30ea057f6478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1239.259808] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:ad:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5072b58-30b6-47d8-ab41-30ea057f6478', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1239.268907] env[62974]: DEBUG oslo.service.loopingcall [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1239.271505] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1239.271843] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1239.272117] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-307853ed-b247-4620-a32f-657d9e47b039 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.289069] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9155b793-6d5b-4943-84d2-076207e531f5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.297462] env[62974]: DEBUG oslo_vmware.api [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1239.297462] env[62974]: value = "task-2655249" [ 1239.297462] env[62974]: _type = "Task" [ 1239.297462] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.298804] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1239.298804] env[62974]: value = "task-2655250" [ 1239.298804] env[62974]: _type = "Task" [ 1239.298804] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.310778] env[62974]: DEBUG oslo_vmware.api [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.313969] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655250, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.335816] env[62974]: DEBUG nova.network.neutron [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Successfully updated port: 5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1239.535244] env[62974]: DEBUG nova.compute.manager [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Received event network-changed-c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1239.535428] env[62974]: DEBUG nova.compute.manager [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Refreshing instance network info cache due to event network-changed-c5072b58-30b6-47d8-ab41-30ea057f6478. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1239.535643] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Acquiring lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.535786] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Acquired lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.535945] env[62974]: DEBUG nova.network.neutron [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Refreshing network info cache for port c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1239.737252] env[62974]: DEBUG nova.compute.utils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1239.738287] env[62974]: DEBUG nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1239.738578] env[62974]: DEBUG nova.network.neutron [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1239.774487] env[62974]: DEBUG nova.policy [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98bfa46f678b4c17bcee4da95099fc0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56a2bd854f6c413b972ccff4cfd52122', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1239.810752] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655250, 'name': CreateVM_Task, 'duration_secs': 0.317118} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.813643] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1239.813937] env[62974]: DEBUG oslo_vmware.api [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655249, 'name': PowerOffVM_Task, 'duration_secs': 0.206938} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.814807] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.814970] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.815303] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1239.815560] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1239.815721] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1239.815934] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a514aea-1caa-471a-859b-ddaf8ed6601c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.817402] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02c3c573-9c09-492d-9f19-6f838232538c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.821696] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1239.821696] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5213c4bf-ec2d-c646-7f80-72b0f72a9afb" [ 1239.821696] env[62974]: _type = "Task" [ 1239.821696] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.828693] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5213c4bf-ec2d-c646-7f80-72b0f72a9afb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.838289] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.838418] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.838562] env[62974]: DEBUG nova.network.neutron [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1239.881027] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1239.881277] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1239.881453] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Deleting the datastore file [datastore1] 9450a3f2-4b2b-4022-842f-f24a8c470098 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1239.881721] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99dbbff0-8b4c-470c-863b-bc88b54fe2c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.887654] env[62974]: DEBUG oslo_vmware.api [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for the task: (returnval){ [ 1239.887654] env[62974]: value = "task-2655252" [ 1239.887654] env[62974]: _type = "Task" [ 1239.887654] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.895331] env[62974]: DEBUG oslo_vmware.api [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655252, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.024473] env[62974]: DEBUG nova.network.neutron [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Successfully created port: 1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1240.241353] env[62974]: DEBUG nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1240.332467] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5213c4bf-ec2d-c646-7f80-72b0f72a9afb, 'name': SearchDatastore_Task, 'duration_secs': 0.009249} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.332818] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.333071] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.333305] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.333448] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.333618] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.334026] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-795b3257-72d2-4020-820a-5638ed8820ff {{(pid=62974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.338238] env[62974]: DEBUG nova.network.neutron [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updated VIF entry in instance network info cache for port c5072b58-30b6-47d8-ab41-30ea057f6478. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1240.338574] env[62974]: DEBUG nova.network.neutron [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updating instance_info_cache with network_info: [{"id": "c5072b58-30b6-47d8-ab41-30ea057f6478", "address": "fa:16:3e:74:ad:aa", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5072b58-30", "ovs_interfaceid": "c5072b58-30b6-47d8-ab41-30ea057f6478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.342694] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.342904] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1240.343920] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92a98cc2-5de6-4f85-acfa-93d8396576a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.348462] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1240.348462] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c1d5a1-97cf-1a9e-9c54-a432d9f0973e" [ 1240.348462] env[62974]: _type = "Task" [ 1240.348462] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.356899] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c1d5a1-97cf-1a9e-9c54-a432d9f0973e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.376632] env[62974]: DEBUG nova.network.neutron [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1240.396174] env[62974]: DEBUG oslo_vmware.api [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Task: {'id': task-2655252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174828} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.398261] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1240.398452] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1240.398743] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1240.398784] env[62974]: INFO nova.compute.manager [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1240.399015] env[62974]: DEBUG oslo.service.loopingcall [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1240.399207] env[62974]: DEBUG nova.compute.manager [-] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1240.399299] env[62974]: DEBUG nova.network.neutron [-] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1240.585308] env[62974]: DEBUG nova.network.neutron [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updating instance_info_cache with network_info: [{"id": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "address": "fa:16:3e:91:05:8a", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5585d129-e3", "ovs_interfaceid": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.841653] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Releasing lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.841939] env[62974]: DEBUG nova.compute.manager [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Received event network-vif-plugged-5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1240.842153] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Acquiring lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.842359] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.842518] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.842717] env[62974]: DEBUG nova.compute.manager [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] No waiting events found dispatching network-vif-plugged-5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1240.842905] env[62974]: WARNING nova.compute.manager [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Received unexpected event network-vif-plugged-5585d129-e3b0-4025-8de6-1a1c14bdadc5 for instance with vm_state building and task_state spawning. [ 1240.843082] env[62974]: DEBUG nova.compute.manager [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Received event network-changed-5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1240.843235] env[62974]: DEBUG nova.compute.manager [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Refreshing instance network info cache due to event network-changed-5585d129-e3b0-4025-8de6-1a1c14bdadc5. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1240.843395] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Acquiring lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.858764] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c1d5a1-97cf-1a9e-9c54-a432d9f0973e, 'name': SearchDatastore_Task, 'duration_secs': 0.008303} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.859529] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da9e824e-b561-4128-9f85-1931e0a91c1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.864326] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1240.864326] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e03a41-31d0-087c-3937-3b94f178d7be" [ 1240.864326] env[62974]: _type = "Task" [ 1240.864326] env[62974]: } to complete.
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.871420] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e03a41-31d0-087c-3937-3b94f178d7be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.087138] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.087479] env[62974]: DEBUG nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Instance network_info: |[{"id": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "address": "fa:16:3e:91:05:8a", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5585d129-e3", "ovs_interfaceid": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1241.087789] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Acquired lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.087971] env[62974]: DEBUG nova.network.neutron [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Refreshing network info cache for port 5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1241.089187] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:05:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5585d129-e3b0-4025-8de6-1a1c14bdadc5', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1241.096603] env[62974]: DEBUG oslo.service.loopingcall [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1241.097548] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1241.097771] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e58a549c-6ca2-4b9c-bdfa-cd56bf80eac1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.117896] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1241.117896] env[62974]: value = "task-2655253" [ 1241.117896] env[62974]: _type = "Task" [ 1241.117896] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.128784] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.186434] env[62974]: DEBUG nova.network.neutron [-] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.249707] env[62974]: DEBUG nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1241.277501] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1241.277835] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.278043] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1241.278254] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.278415] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1241.278564] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1241.278781] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1241.281028] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1241.281028] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1241.281028] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1241.281028] env[62974]: DEBUG nova.virt.hardware [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1241.281028] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9eb72d-5b0e-4dd5-9e11-64125aebb7e0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.288337] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacfa384-f73d-4816-b35e-9cdb6e923348 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.374462] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e03a41-31d0-087c-3937-3b94f178d7be, 'name': SearchDatastore_Task, 'duration_secs': 0.009179} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.374712] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.374967] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be/21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1241.375239] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8954ed0-675e-4bcb-85a8-1de380eef570 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.381742] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1241.381742] env[62974]: value = "task-2655254" [ 1241.381742] env[62974]: _type = "Task" [ 1241.381742] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.389153] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655254, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.567292] env[62974]: DEBUG nova.compute.manager [req-7e061792-4e89-4bce-a197-252e31e2dbc9 req-330cbe98-cbea-4f33-b80b-06ba352bfd80 service nova] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Received event network-vif-deleted-3e3d7b6a-32ca-4958-ac57-63fd0e6971aa {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1241.567495] env[62974]: DEBUG nova.compute.manager [req-7e061792-4e89-4bce-a197-252e31e2dbc9 req-330cbe98-cbea-4f33-b80b-06ba352bfd80 service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Received event network-vif-plugged-1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1241.567681] env[62974]: DEBUG oslo_concurrency.lockutils [req-7e061792-4e89-4bce-a197-252e31e2dbc9 req-330cbe98-cbea-4f33-b80b-06ba352bfd80 service nova] Acquiring lock "01d0c91c-1724-453c-8d83-8f9e77afcef1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.567878] env[62974]: DEBUG oslo_concurrency.lockutils [req-7e061792-4e89-4bce-a197-252e31e2dbc9 req-330cbe98-cbea-4f33-b80b-06ba352bfd80 service nova] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.568051] env[62974]: DEBUG oslo_concurrency.lockutils [req-7e061792-4e89-4bce-a197-252e31e2dbc9 req-330cbe98-cbea-4f33-b80b-06ba352bfd80 service nova] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.568219] env[62974]: DEBUG nova.compute.manager [req-7e061792-4e89-4bce-a197-252e31e2dbc9 req-330cbe98-cbea-4f33-b80b-06ba352bfd80 service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] No waiting events found dispatching network-vif-plugged-1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1241.568383] env[62974]: WARNING nova.compute.manager [req-7e061792-4e89-4bce-a197-252e31e2dbc9 req-330cbe98-cbea-4f33-b80b-06ba352bfd80 service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Received unexpected event network-vif-plugged-1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 for instance with vm_state building and task_state spawning. [ 1241.627561] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.648031] env[62974]: DEBUG nova.network.neutron [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Successfully updated port: 1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1241.689585] env[62974]: INFO nova.compute.manager [-] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Took 1.29 seconds to deallocate network for instance.
[ 1241.799095] env[62974]: DEBUG nova.network.neutron [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updated VIF entry in instance network info cache for port 5585d129-e3b0-4025-8de6-1a1c14bdadc5. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1241.799466] env[62974]: DEBUG nova.network.neutron [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updating instance_info_cache with network_info: [{"id": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "address": "fa:16:3e:91:05:8a", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5585d129-e3", "ovs_interfaceid": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.891100] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655254, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.127849] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.150656] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "refresh_cache-01d0c91c-1724-453c-8d83-8f9e77afcef1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.150792] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquired lock "refresh_cache-01d0c91c-1724-453c-8d83-8f9e77afcef1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.150968] env[62974]: DEBUG nova.network.neutron [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1242.198957] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.199248] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.199462] env[62974]: DEBUG nova.objects.instance [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lazy-loading 'resources' on Instance uuid 9450a3f2-4b2b-4022-842f-f24a8c470098 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1242.301882] env[62974]: DEBUG oslo_concurrency.lockutils [req-5ba81bf8-0e06-4fc6-990b-6c72be4ee1cb req-e3805c78-a385-42d4-b906-bd9b97570197 service nova] Releasing lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.391754] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655254, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.628610] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.685130] env[62974]: DEBUG nova.network.neutron [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Instance cache missing network info. {{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1242.769351] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286d5f0a-ed93-4aab-9421-c6be40a306a6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.776423] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667d3099-86de-45a9-88c6-bac24e8b505c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.807834] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b20d2e9-326f-41b1-95d1-55ea81635981 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.815063] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731f51cf-eb81-4758-b38a-892262d35b4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.829331] env[62974]: DEBUG nova.compute.provider_tree [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.839509] env[62974]: DEBUG nova.network.neutron [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Updating instance_info_cache with network_info: [{"id": "1ea7a36a-46fc-4bc9-b9d2-f6f577a09526", "address": "fa:16:3e:c8:02:f7", "network": {"id": "74dafbbd-eb53-4f4f-a033-d1b6501c82e7", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1299893572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56a2bd854f6c413b972ccff4cfd52122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ea7a36a-46", "ovs_interfaceid": "1ea7a36a-46fc-4bc9-b9d2-f6f577a09526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1242.892332] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655254, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.359034} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.892576] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be/21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1242.892819] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1242.893069] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77e18b87-04e8-4719-8066-ba1d2609615e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.899193] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1242.899193] env[62974]: value = "task-2655255" [ 1242.899193] env[62974]: _type = "Task" [ 1242.899193] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.906912] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655255, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.129190] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.332613] env[62974]: DEBUG nova.scheduler.client.report [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1243.341541] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Releasing lock "refresh_cache-01d0c91c-1724-453c-8d83-8f9e77afcef1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.341824] env[62974]: DEBUG nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Instance network_info: |[{"id": "1ea7a36a-46fc-4bc9-b9d2-f6f577a09526", "address": "fa:16:3e:c8:02:f7", "network": {"id": "74dafbbd-eb53-4f4f-a033-d1b6501c82e7", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1299893572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56a2bd854f6c413b972ccff4cfd52122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ea7a36a-46", "ovs_interfaceid": "1ea7a36a-46fc-4bc9-b9d2-f6f577a09526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1243.342220] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:02:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ea7a36a-46fc-4bc9-b9d2-f6f577a09526', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.349897] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Creating folder: Project (56a2bd854f6c413b972ccff4cfd52122). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.350853] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22a939e3-3357-4c4e-80a9-5d40f213c17a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.361637] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Created folder: Project (56a2bd854f6c413b972ccff4cfd52122) in parent group-v535199. [ 1243.361812] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Creating folder: Instances. Parent ref: group-v535519. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.362038] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89b45b37-28df-4b8b-b003-e9f60d5bf93d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.371170] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Created folder: Instances in parent group-v535519. [ 1243.371391] env[62974]: DEBUG oslo.service.loopingcall [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1243.371568] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1243.371759] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1ca382a-5fec-4625-a089-aa325ff3db9b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.390011] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1243.390011] env[62974]: value = "task-2655258" [ 1243.390011] env[62974]: _type = "Task" [ 1243.390011] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.396857] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655258, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.406761] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655255, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064989} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.407010] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1243.407808] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218d4679-f137-43d3-ae79-ddaad7cf07ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.429017] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be/21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1243.429254] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e30c427e-dd0e-459d-92cd-18cb03a1d3bc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.448500] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1243.448500] env[62974]: value = "task-2655259" [ 1243.448500] env[62974]: _type = "Task" [ 1243.448500] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.457981] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655259, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.594907] env[62974]: DEBUG nova.compute.manager [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Received event network-changed-1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1243.595301] env[62974]: DEBUG nova.compute.manager [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Refreshing instance network info cache due to event network-changed-1ea7a36a-46fc-4bc9-b9d2-f6f577a09526. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1243.595630] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] Acquiring lock "refresh_cache-01d0c91c-1724-453c-8d83-8f9e77afcef1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.595836] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] Acquired lock "refresh_cache-01d0c91c-1724-453c-8d83-8f9e77afcef1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.596061] env[62974]: DEBUG nova.network.neutron [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Refreshing network info cache for port 1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1243.629683] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.837624] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.854871] env[62974]: INFO nova.scheduler.client.report [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Deleted allocations for instance 9450a3f2-4b2b-4022-842f-f24a8c470098 [ 1243.902664] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655258, 'name': CreateVM_Task, 'duration_secs': 0.362689} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.902861] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1243.903510] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.903672] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.904054] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1243.904306] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13888c06-968f-4219-bd69-44c3df8b3e7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.909018] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1243.909018] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52115249-5a56-ee68-1487-056c832b17fe" [ 1243.909018] env[62974]: _type = "Task" [ 1243.909018] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.915999] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52115249-5a56-ee68-1487-056c832b17fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.958360] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655259, 'name': ReconfigVM_Task, 'duration_secs': 0.315226} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.958630] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be/21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1243.959267] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06430506-6c19-4ccb-b3e3-2f3df12d13be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.965857] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1243.965857] env[62974]: value = "task-2655260" [ 1243.965857] env[62974]: _type = "Task" [ 1243.965857] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.973389] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655260, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.129257] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.282424] env[62974]: DEBUG nova.network.neutron [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Updated VIF entry in instance network info cache for port 1ea7a36a-46fc-4bc9-b9d2-f6f577a09526. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1244.282829] env[62974]: DEBUG nova.network.neutron [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Updating instance_info_cache with network_info: [{"id": "1ea7a36a-46fc-4bc9-b9d2-f6f577a09526", "address": "fa:16:3e:c8:02:f7", "network": {"id": "74dafbbd-eb53-4f4f-a033-d1b6501c82e7", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1299893572-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56a2bd854f6c413b972ccff4cfd52122", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ea7a36a-46", "ovs_interfaceid": "1ea7a36a-46fc-4bc9-b9d2-f6f577a09526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.361707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d7f76ad4-51b5-4608-9d11-c484de582b64 tempest-ServerMetadataTestJSON-1940932282 tempest-ServerMetadataTestJSON-1940932282-project-member] Lock "9450a3f2-4b2b-4022-842f-f24a8c470098" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.615s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.419105] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52115249-5a56-ee68-1487-056c832b17fe, 'name': SearchDatastore_Task, 'duration_secs': 0.00955} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.419386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.419612] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1244.419840] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.419981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.420170] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.420416] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64376751-e458-4b6e-81e6-a3eb05f00e67 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.428515] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.428701] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1244.429447] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b346467-5bd0-4044-9d4d-e16be75af7db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.434352] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1244.434352] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52109250-b6f8-2617-c768-647891d88f40" [ 1244.434352] env[62974]: _type = "Task" [ 1244.434352] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.441637] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52109250-b6f8-2617-c768-647891d88f40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.474099] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655260, 'name': Rename_Task, 'duration_secs': 0.140435} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.474381] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1244.474580] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cca1659-d5e2-45fa-a30b-97457fd7f6df {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.479991] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1244.479991] env[62974]: value = "task-2655261" [ 1244.479991] env[62974]: _type = "Task" [ 1244.479991] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.491291] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655261, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.630983] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.786492] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a990cea-d97a-4e64-b1fd-b08d1de1b874 req-2af355e6-0ec9-4903-901e-4223d479e24e service nova] Releasing lock "refresh_cache-01d0c91c-1724-453c-8d83-8f9e77afcef1" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.945024] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52109250-b6f8-2617-c768-647891d88f40, 'name': SearchDatastore_Task, 'duration_secs': 0.016179} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.945817] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9d4045a-8892-4667-b722-21bb64c9508d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.951379] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1244.951379] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528b8397-a517-b99e-aa25-8efbfb93b9bf" [ 1244.951379] env[62974]: _type = "Task" [ 1244.951379] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.959212] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528b8397-a517-b99e-aa25-8efbfb93b9bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.989081] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655261, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.132812] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.464794] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528b8397-a517-b99e-aa25-8efbfb93b9bf, 'name': SearchDatastore_Task, 'duration_secs': 0.020796} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.465150] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.465315] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 01d0c91c-1724-453c-8d83-8f9e77afcef1/01d0c91c-1724-453c-8d83-8f9e77afcef1.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1245.465570] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0debd6f-53eb-49ff-9b8a-9a3f7dac9576 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.472825] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1245.472825] env[62974]: value = "task-2655262" [ 1245.472825] env[62974]: _type = "Task" [ 1245.472825] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.482169] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.490620] env[62974]: DEBUG oslo_vmware.api [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655261, 'name': PowerOnVM_Task, 'duration_secs': 0.524561} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.490890] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1245.491183] env[62974]: INFO nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Took 8.56 seconds to spawn the instance on the hypervisor. 
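Editor's note: the recurring pattern in the entries above -- "Waiting for the task: (returnval){ value = "task-..." _type = "Task" } to complete", repeated "progress is N%" polls, then "completed successfully" with a duration_secs -- is the vSphere task-polling loop that oslo_vmware runs for every long-lived vCenter operation (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task). The sketch below is a simplified, self-contained illustration of that poll-until-done pattern only; it is not the oslo.vmware implementation, and FakeTask, refresh(), and the 0.5s poll_interval are assumptions made purely for the example.

# Illustrative sketch only -- mimics the wait_for_task/_poll_task behaviour
# visible in the log above. All names here (FakeTask, refresh, poll_interval)
# are hypothetical; the real loop lives in oslo_vmware's api.py (the
# api.py:397/434/444 call sites shown in the log entries).

import time
from dataclasses import dataclass


@dataclass
class FakeTask:
    """Stand-in for a vCenter task object (e.g. task-2655261)."""
    task_id: str
    name: str
    state: str = "running"      # running | success | error
    _progress: int = 0
    duration_secs: float = 0.0

    def refresh(self):
        """Pretend to re-read TaskInfo from vCenter; advance progress."""
        self._progress = min(self._progress + 25, 100)
        if self._progress == 100:
            self.state = "success"

    @property
    def progress(self) -> int:
        return self._progress


def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> FakeTask:
    """Block until the task completes, logging progress like the entries above."""
    started = time.monotonic()
    while True:
        task.refresh()
        if task.state == "success":
            task.duration_secs = time.monotonic() - started
            print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}, "
                  f"'duration_secs': {task.duration_secs:.6f}}} completed successfully.")
            return task
        if task.state == "error":
            raise RuntimeError(f"{task.name} ({task.task_id}) failed")
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} "
              f"progress is {task.progress}%.")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask(task_id="task-2655261", name="PowerOnVM_Task"))

In the actual driver this blocking wait is what serialises each spawn step in the log (copy the cached image VMDK, extend the root disk, reconfigure the VM to attach the disk, rename, power on), which is why a single instance build shows a chain of consecutive task IDs (task-2655254 through task-2655261) before the "Took N seconds to spawn the instance on the hypervisor" INFO line appears.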
[ 1245.491385] env[62974]: DEBUG nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1245.492165] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c912efc-6704-488d-8e22-e33dab00dcda {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.635783] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655253, 'name': CreateVM_Task, 'duration_secs': 4.097117} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.635975] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1245.636732] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.636902] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.637252] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1245.638403] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-173a3cb7-00bc-472d-b03a-02cfb3399bc1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.644414] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1245.644414] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52adcc9b-ccf9-1dc1-5717-d0bc073faa52" [ 1245.644414] env[62974]: _type = "Task" [ 1245.644414] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.653826] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52adcc9b-ccf9-1dc1-5717-d0bc073faa52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.983264] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496438} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.983521] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 01d0c91c-1724-453c-8d83-8f9e77afcef1/01d0c91c-1724-453c-8d83-8f9e77afcef1.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1245.983767] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1245.984050] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40f0a1ce-aca3-435e-b93e-f6a8222035f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.990588] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1245.990588] env[62974]: value = "task-2655263" [ 1245.990588] env[62974]: _type = "Task" [ 1245.990588] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.998769] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655263, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.009024] env[62974]: INFO nova.compute.manager [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Took 14.28 seconds to build instance. [ 1246.155487] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52adcc9b-ccf9-1dc1-5717-d0bc073faa52, 'name': SearchDatastore_Task, 'duration_secs': 0.056769} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.155669] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.155897] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1246.156362] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.156362] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.156504] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1246.156738] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ac77c6b-eaab-49f2-985b-7a3aff0e1403 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.165691] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1246.166700] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1246.167860] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-669ee3d4-bc72-4084-9690-618ae393e77b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.173779] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1246.173779] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]525b9789-d395-1e72-1dfc-0b611fe51d8a" [ 1246.173779] env[62974]: _type = "Task" [ 1246.173779] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.182431] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525b9789-d395-1e72-1dfc-0b611fe51d8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.500039] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655263, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064049} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.500371] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1246.504022] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa8eb85-3916-47df-bebf-fd47f393c54d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.514792] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b4960571-2ff0-45c4-bf7a-d51d1cee667e tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.793s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.524610] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 01d0c91c-1724-453c-8d83-8f9e77afcef1/01d0c91c-1724-453c-8d83-8f9e77afcef1.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1246.524928] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-d6673151-b3fb-4048-ae8f-6cc4e9afea8e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.545240] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1246.545240] env[62974]: value = "task-2655264" [ 1246.545240] env[62974]: _type = "Task" [ 1246.545240] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.554987] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655264, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.686902] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]525b9789-d395-1e72-1dfc-0b611fe51d8a, 'name': SearchDatastore_Task, 'duration_secs': 0.036216} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.686902] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55aefdd6-98b5-4b8f-933c-6e05622570c0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.692711] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1246.692711] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dff716-57d1-9e2a-3795-68a8fbaac6c9" [ 1246.692711] env[62974]: _type = "Task" [ 1246.692711] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.702570] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dff716-57d1-9e2a-3795-68a8fbaac6c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.741960] env[62974]: DEBUG nova.compute.manager [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Received event network-changed-c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1246.742167] env[62974]: DEBUG nova.compute.manager [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Refreshing instance network info cache due to event network-changed-c5072b58-30b6-47d8-ab41-30ea057f6478. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1246.742371] env[62974]: DEBUG oslo_concurrency.lockutils [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] Acquiring lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.742505] env[62974]: DEBUG oslo_concurrency.lockutils [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] Acquired lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.742671] env[62974]: DEBUG nova.network.neutron [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Refreshing network info cache for port c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1247.055557] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655264, 'name': ReconfigVM_Task, 'duration_secs': 0.278396} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.055917] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 01d0c91c-1724-453c-8d83-8f9e77afcef1/01d0c91c-1724-453c-8d83-8f9e77afcef1.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1247.056587] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ed931a7-6870-4978-8c27-d26f2e9563e6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.062235] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1247.062235] env[62974]: value = "task-2655265" [ 1247.062235] env[62974]: _type = "Task" [ 1247.062235] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.069548] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655265, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.202659] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52dff716-57d1-9e2a-3795-68a8fbaac6c9, 'name': SearchDatastore_Task, 'duration_secs': 0.014974} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.202971] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.203375] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e71134bd-23a1-4cc3-9e85-e8b6054be6d5/e71134bd-23a1-4cc3-9e85-e8b6054be6d5.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1247.203649] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cbb6b6e-448d-4d94-9a0e-07233ec0c5f8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.210220] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1247.210220] env[62974]: value = "task-2655266" [ 1247.210220] env[62974]: _type = "Task" [ 1247.210220] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.218044] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.453148] env[62974]: DEBUG nova.network.neutron [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updated VIF entry in instance network info cache for port c5072b58-30b6-47d8-ab41-30ea057f6478. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.453525] env[62974]: DEBUG nova.network.neutron [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updating instance_info_cache with network_info: [{"id": "c5072b58-30b6-47d8-ab41-30ea057f6478", "address": "fa:16:3e:74:ad:aa", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5072b58-30", "ovs_interfaceid": "c5072b58-30b6-47d8-ab41-30ea057f6478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.572432] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655265, 'name': Rename_Task, 'duration_secs': 0.128344} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.572838] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1247.573114] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc430d2a-db2b-4d2e-a9fd-d5efa887a02c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.579836] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1247.579836] env[62974]: value = "task-2655267" [ 1247.579836] env[62974]: _type = "Task" [ 1247.579836] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.587929] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655267, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.720014] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655266, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.956440] env[62974]: DEBUG oslo_concurrency.lockutils [req-dfb931db-ff71-42b7-86c2-d6fbe4b78fe3 req-fc6f67d4-3bec-44b4-818c-8496c0c0281f service nova] Releasing lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.090247] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655267, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.220303] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655266, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663086} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.220542] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] e71134bd-23a1-4cc3-9e85-e8b6054be6d5/e71134bd-23a1-4cc3-9e85-e8b6054be6d5.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1248.220785] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1248.221047] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fb1de18-65a6-43e4-856c-8f1cddc93873 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.228342] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1248.228342] env[62974]: value = "task-2655268" [ 1248.228342] env[62974]: _type = "Task" [ 1248.228342] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.236700] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655268, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.594152] env[62974]: DEBUG oslo_vmware.api [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655267, 'name': PowerOnVM_Task, 'duration_secs': 0.733146} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.594483] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1248.594767] env[62974]: INFO nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Took 7.34 seconds to spawn the instance on the hypervisor. [ 1248.595044] env[62974]: DEBUG nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1248.596099] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8214f2e4-fa8a-415d-90b7-2215619f1981 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.737732] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655268, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106631} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.737999] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1248.738780] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4c6854-ab2d-4cb9-9bd1-4f0dc69d4213 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.760147] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] e71134bd-23a1-4cc3-9e85-e8b6054be6d5/e71134bd-23a1-4cc3-9e85-e8b6054be6d5.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.760396] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc91f815-0836-4ee4-9831-644fa60b3775 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.779421] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1248.779421] env[62974]: value = "task-2655269" [ 1248.779421] env[62974]: _type = "Task" [ 1248.779421] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.787094] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655269, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.116018] env[62974]: INFO nova.compute.manager [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Took 13.16 seconds to build instance. [ 1249.289496] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655269, 'name': ReconfigVM_Task, 'duration_secs': 0.350623} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.289758] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Reconfigured VM instance instance-00000075 to attach disk [datastore1] e71134bd-23a1-4cc3-9e85-e8b6054be6d5/e71134bd-23a1-4cc3-9e85-e8b6054be6d5.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.290443] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39e06326-27e8-4a60-9453-69da02f4e687 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.296902] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1249.296902] env[62974]: value = "task-2655270" [ 1249.296902] env[62974]: _type = "Task" [ 1249.296902] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.303925] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655270, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.618756] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3ad7af24-0f7f-4734-9d01-e41003ed3c48 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.668s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.806898] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655270, 'name': Rename_Task, 'duration_secs': 0.137958} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.807180] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1249.807425] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84969d91-c54c-4942-b2cb-1b9dd0c9f589 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.813371] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1249.813371] env[62974]: value = "task-2655271" [ 1249.813371] env[62974]: _type = "Task" [ 1249.813371] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.820678] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655271, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.223014] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "01d0c91c-1724-453c-8d83-8f9e77afcef1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.223263] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.223479] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "01d0c91c-1724-453c-8d83-8f9e77afcef1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.223661] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.223829] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 
tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.225987] env[62974]: INFO nova.compute.manager [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Terminating instance [ 1250.324525] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655271, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.729683] env[62974]: DEBUG nova.compute.manager [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1250.730087] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1250.730944] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa9e1a9-985c-4b9a-81dd-a40d84d0ae05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.739405] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1250.739688] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c0b86db-db8c-4f6a-b3f7-11c2c0d3591e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.745849] env[62974]: DEBUG oslo_vmware.api [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1250.745849] env[62974]: value = "task-2655272" [ 1250.745849] env[62974]: _type = "Task" [ 1250.745849] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.754168] env[62974]: DEBUG oslo_vmware.api [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655272, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.823741] env[62974]: DEBUG oslo_vmware.api [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655271, 'name': PowerOnVM_Task, 'duration_secs': 0.655638} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.824018] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1250.824198] env[62974]: INFO nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Took 11.72 seconds to spawn the instance on the hypervisor. [ 1250.824383] env[62974]: DEBUG nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1250.825184] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065549bf-6127-4b29-9e49-5cea13bff4d3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.257782] env[62974]: DEBUG oslo_vmware.api [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655272, 'name': PowerOffVM_Task, 'duration_secs': 0.191268} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.257782] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1251.257782] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1251.257782] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fe88c39-a9f8-4301-bd3a-0da1b3366365 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.321026] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1251.321026] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1251.321026] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Deleting the datastore file [datastore1] 01d0c91c-1724-453c-8d83-8f9e77afcef1 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1251.321026] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db001f99-a8a2-4b86-8166-1e876c8576d1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.327863] env[62974]: DEBUG oslo_vmware.api [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for the task: (returnval){ [ 1251.327863] env[62974]: value = "task-2655274" [ 1251.327863] env[62974]: _type = "Task" [ 1251.327863] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.341301] env[62974]: DEBUG oslo_vmware.api [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655274, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.345876] env[62974]: INFO nova.compute.manager [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Took 17.32 seconds to build instance. [ 1251.483737] env[62974]: DEBUG nova.compute.manager [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Received event network-changed-5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1251.483959] env[62974]: DEBUG nova.compute.manager [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Refreshing instance network info cache due to event network-changed-5585d129-e3b0-4025-8de6-1a1c14bdadc5. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1251.484204] env[62974]: DEBUG oslo_concurrency.lockutils [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] Acquiring lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.484350] env[62974]: DEBUG oslo_concurrency.lockutils [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] Acquired lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.484510] env[62974]: DEBUG nova.network.neutron [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Refreshing network info cache for port 5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1251.838721] env[62974]: DEBUG oslo_vmware.api [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Task: {'id': task-2655274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205048} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.839136] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1251.839507] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1251.839791] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1251.840018] env[62974]: INFO nova.compute.manager [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1251.840275] env[62974]: DEBUG oslo.service.loopingcall [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1251.840472] env[62974]: DEBUG nova.compute.manager [-] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1251.840569] env[62974]: DEBUG nova.network.neutron [-] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1251.847707] env[62974]: DEBUG oslo_concurrency.lockutils [None req-799314b7-f64d-45e8-a5f2-30d903a014f2 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.835s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.153156] env[62974]: DEBUG nova.compute.manager [req-1fceba04-5726-43aa-8819-4672fa179da7 req-ecb01475-5986-4857-9624-dbe76725f496 service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Received event network-vif-deleted-1ea7a36a-46fc-4bc9-b9d2-f6f577a09526 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1252.153156] env[62974]: INFO nova.compute.manager [req-1fceba04-5726-43aa-8819-4672fa179da7 req-ecb01475-5986-4857-9624-dbe76725f496 service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Neutron deleted interface 1ea7a36a-46fc-4bc9-b9d2-f6f577a09526; detaching it from the instance and deleting it from the info cache [ 1252.153156] env[62974]: DEBUG nova.network.neutron [req-1fceba04-5726-43aa-8819-4672fa179da7 req-ecb01475-5986-4857-9624-dbe76725f496 service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.253091] env[62974]: DEBUG nova.network.neutron [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updated VIF entry in instance network info cache for port 5585d129-e3b0-4025-8de6-1a1c14bdadc5. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1252.253463] env[62974]: DEBUG nova.network.neutron [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updating instance_info_cache with network_info: [{"id": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "address": "fa:16:3e:91:05:8a", "network": {"id": "7210ac6e-daa6-41c4-b6fe-300d58cd435b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-486833181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43dc876c8a2346c7bca249407fb7fed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5585d129-e3", "ovs_interfaceid": "5585d129-e3b0-4025-8de6-1a1c14bdadc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.624810] env[62974]: DEBUG nova.network.neutron [-] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.656631] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24d0244d-524f-4024-b08a-fd78c903fdd5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.666700] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c04e0d1-6136-4185-9ba6-9d0049dde888 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.692308] env[62974]: DEBUG nova.compute.manager [req-1fceba04-5726-43aa-8819-4672fa179da7 req-ecb01475-5986-4857-9624-dbe76725f496 service nova] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Detach interface failed, port_id=1ea7a36a-46fc-4bc9-b9d2-f6f577a09526, reason: Instance 01d0c91c-1724-453c-8d83-8f9e77afcef1 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1252.756397] env[62974]: DEBUG oslo_concurrency.lockutils [req-1ee55d07-7f84-433d-9e01-3e477bf2038a req-f64d6698-b699-4ac7-af7d-234915283e09 service nova] Releasing lock "refresh_cache-e71134bd-23a1-4cc3-9e85-e8b6054be6d5" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.127409] env[62974]: INFO nova.compute.manager [-] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Took 1.29 seconds to deallocate network for instance. 
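The spawn and teardown sequences above all follow the same wait-for-task pattern: each vCenter call (ReconfigVM_Task, CopyVirtualDisk_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) returns a task reference, and the driver polls it until vCenter reports a terminal state, which is what produces the repeated "progress is N%" and "completed successfully ... duration_secs" entries. The following is a minimal, self-contained sketch of that polling loop for illustration only; the function and field names are assumptions, and it is not the oslo.vmware code that emitted these log lines.

import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll get_task_info() until the task succeeds or fails.

    get_task_info is any callable returning a dict with the keys
    'state' ('running', 'success' or 'error') and 'progress' (0-100),
    loosely mirroring the fields vCenter reports for a Task object.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info["state"] == "success":
            # Corresponds to the "completed successfully ... duration_secs" lines.
            return time.monotonic() - start
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "task failed"))
        # Corresponds to the intermediate "progress is N%" lines.
        print("progress is {}%".format(info["progress"]))
        time.sleep(poll_interval)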
[ 1253.634969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.635213] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.635433] env[62974]: DEBUG nova.objects.instance [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lazy-loading 'resources' on Instance uuid 01d0c91c-1724-453c-8d83-8f9e77afcef1 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1254.195736] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce51dd96-aa46-4f49-a3bf-f1fdb33356d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.203312] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e52fe3-9460-44fa-84a6-002ac0701c7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.233182] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21809169-3350-492a-bfcd-af14060a10a2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.240220] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3422f90-3396-479f-bc1d-63bfca0d39c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.254233] env[62974]: DEBUG nova.compute.provider_tree [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.757855] env[62974]: DEBUG nova.scheduler.client.report [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1255.262957] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.283954] env[62974]: INFO nova.scheduler.client.report [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Deleted allocations for instance 01d0c91c-1724-453c-8d83-8f9e77afcef1 [ 1255.792647] env[62974]: DEBUG oslo_concurrency.lockutils [None req-9cab4feb-c9c5-4954-bab4-09741eba1012 tempest-InstanceActionsV221TestJSON-961207877 tempest-InstanceActionsV221TestJSON-961207877-project-member] Lock "01d0c91c-1724-453c-8d83-8f9e77afcef1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.569s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.239050] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.239050] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.239050] env[62974]: DEBUG nova.compute.manager [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1283.239605] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9dcf06-2485-4315-b003-8cf330fcc87d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.246342] env[62974]: DEBUG nova.compute.manager [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1283.246985] env[62974]: DEBUG nova.objects.instance [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'flavor' on Instance uuid 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1284.254687] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 
tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1284.255065] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4e3af49-776e-4800-bbe1-e6c389f794c8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.261641] env[62974]: DEBUG oslo_vmware.api [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1284.261641] env[62974]: value = "task-2655275" [ 1284.261641] env[62974]: _type = "Task" [ 1284.261641] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.269773] env[62974]: DEBUG oslo_vmware.api [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655275, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.771665] env[62974]: DEBUG oslo_vmware.api [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655275, 'name': PowerOffVM_Task, 'duration_secs': 0.253347} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.771910] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1284.772116] env[62974]: DEBUG nova.compute.manager [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1284.772845] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293ba180-30c2-4927-9b22-3eefc9a6c2b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.284458] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8db24c6c-5b78-4179-af9f-d9dad9a5b406 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.045s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.599595] env[62974]: DEBUG nova.objects.instance [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'flavor' on Instance uuid 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.105188] env[62974]: DEBUG 
oslo_concurrency.lockutils [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.105393] env[62974]: DEBUG oslo_concurrency.lockutils [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.105523] env[62974]: DEBUG nova.network.neutron [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.105694] env[62974]: DEBUG nova.objects.instance [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'info_cache' on Instance uuid 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.609540] env[62974]: DEBUG nova.objects.base [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Object Instance<21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be> lazy-loaded attributes: flavor,info_cache {{(pid=62974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1287.319638] env[62974]: DEBUG nova.network.neutron [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updating instance_info_cache with network_info: [{"id": "c5072b58-30b6-47d8-ab41-30ea057f6478", "address": "fa:16:3e:74:ad:aa", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5072b58-30", "ovs_interfaceid": "c5072b58-30b6-47d8-ab41-30ea057f6478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.825042] env[62974]: DEBUG oslo_concurrency.lockutils [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d 
tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.830362] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1288.830765] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3a50ceb-ffc7-4e86-9d57-23b3333a310a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.837806] env[62974]: DEBUG oslo_vmware.api [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1288.837806] env[62974]: value = "task-2655276" [ 1288.837806] env[62974]: _type = "Task" [ 1288.837806] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.845433] env[62974]: DEBUG oslo_vmware.api [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655276, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.115131] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.115377] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.349326] env[62974]: DEBUG oslo_vmware.api [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655276, 'name': PowerOnVM_Task, 'duration_secs': 0.350008} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.349546] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1289.349730] env[62974]: DEBUG nova.compute.manager [None req-856a6b00-b210-4aa2-bd90-8520a6d5231d tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1289.350491] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262ad067-860d-4ff9-bf92-f0893bff2d9a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.619014] env[62974]: DEBUG nova.compute.utils [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1290.122278] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.337124] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16f6594-256f-4295-8781-0d2ba24c3a2c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.343788] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-40c8e6a0-9582-4e42-b799-47de2c943c4f tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Suspending the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1290.344016] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a7853b47-2e61-4077-9338-2384cae76f83 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.350727] env[62974]: DEBUG oslo_vmware.api [None req-40c8e6a0-9582-4e42-b799-47de2c943c4f tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1290.350727] env[62974]: value = "task-2655277" [ 1290.350727] env[62974]: _type = "Task" [ 1290.350727] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.358529] env[62974]: DEBUG oslo_vmware.api [None req-40c8e6a0-9582-4e42-b799-47de2c943c4f tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655277, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.861726] env[62974]: DEBUG oslo_vmware.api [None req-40c8e6a0-9582-4e42-b799-47de2c943c4f tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655277, 'name': SuspendVM_Task} progress is 75%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.187498] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.187906] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.187906] env[62974]: INFO nova.compute.manager [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Attaching volume d2e0787e-8f43-42fb-a530-768320c515c7 to /dev/sdb [ 1291.217803] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0f5495-a7a8-4660-b39c-db34becad25f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.224990] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9146ccf-8cf3-4ab9-b4e6-dddd5faa2e1d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.237687] env[62974]: DEBUG nova.virt.block_device [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updating existing volume attachment record: 9e4c3494-63ff-4e17-ac10-8e4e761090fb {{(pid=62974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1291.361650] env[62974]: DEBUG oslo_vmware.api [None req-40c8e6a0-9582-4e42-b799-47de2c943c4f tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655277, 'name': SuspendVM_Task, 'duration_secs': 0.636094} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.361920] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-40c8e6a0-9582-4e42-b799-47de2c943c4f tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Suspended the VM {{(pid=62974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1291.362124] env[62974]: DEBUG nova.compute.manager [None req-40c8e6a0-9582-4e42-b799-47de2c943c4f tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1291.362940] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481ed294-02ba-4599-86c5-7e4aeaf11a7a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.649994] env[62974]: INFO nova.compute.manager [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Resuming [ 1292.650664] env[62974]: DEBUG nova.objects.instance [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'flavor' on Instance uuid 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.758389] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.758611] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.758747] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1293.763676] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Didn't find any instances for network info cache update. 
{{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1293.764089] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.764089] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.764274] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.764329] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.764480] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.764636] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.764768] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1293.764910] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.161734] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.161969] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquired lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.162050] env[62974]: DEBUG nova.network.neutron [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1294.268409] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.268652] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.268794] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.268946] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1294.269854] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8493321-e403-4d40-a386-646029f0258a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.277794] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7998097e-b77d-4e00-b741-0584e2ee9f03 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.291015] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3fbdfccb-ddc4-4930-9a35-6f3ca84e0e66 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.296867] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dd08ea-585f-4e88-a1d0-58cd4f557f14 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.327021] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180761MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1294.327021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.327021] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.856954] env[62974]: DEBUG nova.network.neutron [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updating instance_info_cache with network_info: [{"id": "c5072b58-30b6-47d8-ab41-30ea057f6478", "address": "fa:16:3e:74:ad:aa", "network": {"id": "ad460538-69cd-4506-bc1b-0fc89848a0d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2071330474-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "567f64e735384503b6c0172050bdfaf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5072b58-30", "ovs_interfaceid": "c5072b58-30b6-47d8-ab41-30ea057f6478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.352529] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1295.352689] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance e71134bd-23a1-4cc3-9e85-e8b6054be6d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1295.352863] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1295.353013] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1295.359835] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Releasing lock "refresh_cache-21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.361173] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3698ca-2a5c-4ffb-ab17-58f66bb744db {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.368831] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Resuming the VM {{(pid=62974) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1295.371092] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27d60a66-cd7a-4976-864b-5eced2f13f17 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.377108] env[62974]: DEBUG oslo_vmware.api [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1295.377108] env[62974]: value = "task-2655280" [ 1295.377108] env[62974]: _type = "Task" [ 1295.377108] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.385811] env[62974]: DEBUG oslo_vmware.api [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655280, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.391628] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e069131-e800-4ba0-a11d-add1d3288c5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.398069] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfde6643-3853-4264-9cf8-c9cd92a84951 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.429891] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50e74e3-7e3f-4f8f-8888-56f9c75f69fd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.436866] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357a91f5-882a-4415-8af5-1275b3eb4588 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.449673] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1295.780398] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Volume attach. Driver type: vmdk {{(pid=62974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1295.780643] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535522', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'name': 'volume-d2e0787e-8f43-42fb-a530-768320c515c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e71134bd-23a1-4cc3-9e85-e8b6054be6d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'serial': 'd2e0787e-8f43-42fb-a530-768320c515c7'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1295.781650] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15845a3a-4080-4671-8309-54ea28740a5a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.798323] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d001fa-363b-4e3d-bc52-c8bcde336880 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.824164] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: 
e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] volume-d2e0787e-8f43-42fb-a530-768320c515c7/volume-d2e0787e-8f43-42fb-a530-768320c515c7.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.824762] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b81c5123-cb3a-41aa-aca9-00b483ccb4e1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.844312] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1295.844312] env[62974]: value = "task-2655281" [ 1295.844312] env[62974]: _type = "Task" [ 1295.844312] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.855432] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655281, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.887284] env[62974]: DEBUG oslo_vmware.api [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655280, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.952875] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1296.355022] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655281, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.386771] env[62974]: DEBUG oslo_vmware.api [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655280, 'name': PowerOnVM_Task, 'duration_secs': 0.513496} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.387068] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Resumed the VM {{(pid=62974) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1296.387280] env[62974]: DEBUG nova.compute.manager [None req-1f436e63-6f3e-4482-8b73-b28e2fc584d9 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1296.388100] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63b35d8-4a5f-4d90-ba95-74ffbfad396c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.458272] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1296.458488] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.132s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.855427] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655281, 'name': ReconfigVM_Task, 'duration_secs': 0.644207} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.855806] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Reconfigured VM instance instance-00000075 to attach disk [datastore2] volume-d2e0787e-8f43-42fb-a530-768320c515c7/volume-d2e0787e-8f43-42fb-a530-768320c515c7.vmdk or device None with type thin {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1296.860475] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f047dd8b-cb3c-456e-a680-d58e7e130da9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.874917] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1296.874917] env[62974]: value = "task-2655282" [ 1296.874917] env[62974]: _type = "Task" [ 1296.874917] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.884898] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655282, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.385578] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655282, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.510620] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.510881] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.511110] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.511310] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.511498] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.513563] env[62974]: INFO nova.compute.manager [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Terminating instance [ 1297.886097] env[62974]: DEBUG oslo_vmware.api [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 
tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655282, 'name': ReconfigVM_Task, 'duration_secs': 0.517392} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.886424] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535522', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'name': 'volume-d2e0787e-8f43-42fb-a530-768320c515c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e71134bd-23a1-4cc3-9e85-e8b6054be6d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'serial': 'd2e0787e-8f43-42fb-a530-768320c515c7'} {{(pid=62974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1298.017790] env[62974]: DEBUG nova.compute.manager [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1298.018063] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1298.019350] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206c07ac-c443-4fff-8a9d-c988a3e5e693 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.027101] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1298.027585] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21c11df7-bf59-415f-9dc9-68b4a97fa2dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.033598] env[62974]: DEBUG oslo_vmware.api [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1298.033598] env[62974]: value = "task-2655283" [ 1298.033598] env[62974]: _type = "Task" [ 1298.033598] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.041604] env[62974]: DEBUG oslo_vmware.api [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.543257] env[62974]: DEBUG oslo_vmware.api [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655283, 'name': PowerOffVM_Task, 'duration_secs': 0.183147} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.543644] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1298.543846] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1298.544153] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c98d8b62-aa7d-4ff0-bdc9-b5f31e18cdae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.612744] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1298.613058] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1298.613319] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleting the datastore file [datastore1] 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1298.613622] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3de784ac-db6b-4d38-9fab-7ce8c94a302e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.619901] env[62974]: DEBUG oslo_vmware.api [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for the task: (returnval){ [ 1298.619901] env[62974]: value = "task-2655285" [ 1298.619901] env[62974]: _type = "Task" [ 1298.619901] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.627524] env[62974]: DEBUG oslo_vmware.api [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655285, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.922371] env[62974]: DEBUG nova.objects.instance [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'flavor' on Instance uuid e71134bd-23a1-4cc3-9e85-e8b6054be6d5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1299.130163] env[62974]: DEBUG oslo_vmware.api [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Task: {'id': task-2655285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144224} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.130691] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1299.130891] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1299.131087] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1299.131335] env[62974]: INFO nova.compute.manager [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1299.131773] env[62974]: DEBUG oslo.service.loopingcall [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1299.132110] env[62974]: DEBUG nova.compute.manager [-] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1299.132209] env[62974]: DEBUG nova.network.neutron [-] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1299.429650] env[62974]: DEBUG oslo_concurrency.lockutils [None req-d3f2b03e-ae4c-4bd7-a140-8d969c44dcc9 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.242s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.567426] env[62974]: DEBUG nova.compute.manager [req-37d6b099-aa9e-4d89-b9eb-e06cfc63228c req-d14e93e5-9a12-43dc-b499-b0d3d1c60d8e service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Received event network-vif-deleted-c5072b58-30b6-47d8-ab41-30ea057f6478 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1299.567426] env[62974]: INFO nova.compute.manager [req-37d6b099-aa9e-4d89-b9eb-e06cfc63228c req-d14e93e5-9a12-43dc-b499-b0d3d1c60d8e service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Neutron deleted interface c5072b58-30b6-47d8-ab41-30ea057f6478; detaching it from the instance and deleting it from the info cache [ 1299.568072] env[62974]: DEBUG nova.network.neutron [req-37d6b099-aa9e-4d89-b9eb-e06cfc63228c req-d14e93e5-9a12-43dc-b499-b0d3d1c60d8e service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.859061] env[62974]: DEBUG oslo_concurrency.lockutils [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.859326] env[62974]: DEBUG oslo_concurrency.lockutils [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.051900] env[62974]: DEBUG nova.network.neutron [-] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.070594] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce002015-67f7-4d06-b648-c2d55001ab28 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.080568] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-e3f80070-8ac7-4723-8875-cfd0af2d5a54 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.105937] env[62974]: DEBUG nova.compute.manager [req-37d6b099-aa9e-4d89-b9eb-e06cfc63228c req-d14e93e5-9a12-43dc-b499-b0d3d1c60d8e service nova] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Detach interface failed, port_id=c5072b58-30b6-47d8-ab41-30ea057f6478, reason: Instance 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1300.362640] env[62974]: INFO nova.compute.manager [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Detaching volume d2e0787e-8f43-42fb-a530-768320c515c7 [ 1300.397205] env[62974]: INFO nova.virt.block_device [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Attempting to driver detach volume d2e0787e-8f43-42fb-a530-768320c515c7 from mountpoint /dev/sdb [ 1300.397601] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Volume detach. Driver type: vmdk {{(pid=62974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1300.397919] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535522', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'name': 'volume-d2e0787e-8f43-42fb-a530-768320c515c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e71134bd-23a1-4cc3-9e85-e8b6054be6d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'serial': 'd2e0787e-8f43-42fb-a530-768320c515c7'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1300.399309] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed05858-af38-41af-85e8-b8479e12dc51 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.421609] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9659a2-674a-4cb7-ad86-70318f450836 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.428421] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71318bee-0201-42e9-9436-d52090631f2b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.449878] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfd577f-62e4-4b26-b549-450e854f611a {{(pid=62974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.464257] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] The volume has not been displaced from its original location: [datastore2] volume-d2e0787e-8f43-42fb-a530-768320c515c7/volume-d2e0787e-8f43-42fb-a530-768320c515c7.vmdk. No consolidation needed. {{(pid=62974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1300.469389] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1300.469664] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f25be3ec-6d8e-4854-9757-631f46918043 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.485958] env[62974]: DEBUG oslo_vmware.api [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1300.485958] env[62974]: value = "task-2655286" [ 1300.485958] env[62974]: _type = "Task" [ 1300.485958] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.493423] env[62974]: DEBUG oslo_vmware.api [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655286, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.554276] env[62974]: INFO nova.compute.manager [-] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Took 1.42 seconds to deallocate network for instance. [ 1300.996363] env[62974]: DEBUG oslo_vmware.api [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655286, 'name': ReconfigVM_Task, 'duration_secs': 0.208955} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.996713] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=62974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1301.001159] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-913b10c0-86db-4b38-8ad2-26087e540fba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.015450] env[62974]: DEBUG oslo_vmware.api [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1301.015450] env[62974]: value = "task-2655287" [ 1301.015450] env[62974]: _type = "Task" [ 1301.015450] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.022651] env[62974]: DEBUG oslo_vmware.api [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655287, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.060818] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.061086] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.061311] env[62974]: DEBUG nova.objects.instance [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lazy-loading 'resources' on Instance uuid 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1301.525221] env[62974]: DEBUG oslo_vmware.api [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655287, 'name': ReconfigVM_Task, 'duration_secs': 0.131976} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.525515] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-535522', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'name': 'volume-d2e0787e-8f43-42fb-a530-768320c515c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e71134bd-23a1-4cc3-9e85-e8b6054be6d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd2e0787e-8f43-42fb-a530-768320c515c7', 'serial': 'd2e0787e-8f43-42fb-a530-768320c515c7'} {{(pid=62974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1301.602842] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f0492c-f81d-4d67-89bc-1ce1eda38307 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.610316] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebc9c57-ad88-4e25-a532-7efc59a90c62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.640510] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410bfa80-adfc-4f4b-8ca5-babccfc3a75f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.647125] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3542b549-a922-433a-b86d-c955a0e89c27 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.659689] env[62974]: DEBUG nova.compute.provider_tree [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1302.065020] env[62974]: DEBUG nova.objects.instance [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'flavor' on Instance uuid e71134bd-23a1-4cc3-9e85-e8b6054be6d5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.163051] env[62974]: DEBUG nova.scheduler.client.report [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1302.667363] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.688201] env[62974]: INFO nova.scheduler.client.report [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Deleted allocations for instance 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be [ 1303.071906] env[62974]: DEBUG oslo_concurrency.lockutils [None req-048e9571-240a-4552-b3d4-1fb8ee224829 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.212s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.196634] env[62974]: DEBUG oslo_concurrency.lockutils [None req-8ec3217b-37ed-4d84-9faf-1e212eec9975 tempest-ServerActionsTestJSON-599772511 tempest-ServerActionsTestJSON-599772511-project-member] Lock "21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.686s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.095084] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.095448] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.095603] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.095798] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.095968] env[62974]: 
DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.098096] env[62974]: INFO nova.compute.manager [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Terminating instance [ 1304.605144] env[62974]: DEBUG nova.compute.manager [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1304.605385] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1304.606354] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e12d0d-7583-4e76-9197-c78841fa71c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.614099] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1304.614335] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ce8c1c9-2230-4893-a2d0-196b82c7aa30 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.620952] env[62974]: DEBUG oslo_vmware.api [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1304.620952] env[62974]: value = "task-2655288" [ 1304.620952] env[62974]: _type = "Task" [ 1304.620952] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.629013] env[62974]: DEBUG oslo_vmware.api [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655288, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.133947] env[62974]: DEBUG oslo_vmware.api [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655288, 'name': PowerOffVM_Task, 'duration_secs': 0.215575} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.134610] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1305.134610] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1305.134777] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6bd7cf2-a779-492e-b5c7-d2658324d3dc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.199290] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1305.199513] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1305.199712] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleting the datastore file [datastore1] e71134bd-23a1-4cc3-9e85-e8b6054be6d5 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1305.200026] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21e684e2-66f3-4b89-ac93-0274f739c59c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.206246] env[62974]: DEBUG oslo_vmware.api [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for the task: (returnval){ [ 1305.206246] env[62974]: value = "task-2655290" [ 1305.206246] env[62974]: _type = "Task" [ 1305.206246] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.215214] env[62974]: DEBUG oslo_vmware.api [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.716420] env[62974]: DEBUG oslo_vmware.api [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Task: {'id': task-2655290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146974} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.716683] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1305.716863] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1305.717153] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1305.717320] env[62974]: INFO nova.compute.manager [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1305.717590] env[62974]: DEBUG oslo.service.loopingcall [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1305.717824] env[62974]: DEBUG nova.compute.manager [-] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1305.717952] env[62974]: DEBUG nova.network.neutron [-] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1306.502421] env[62974]: DEBUG nova.compute.manager [req-92da049b-bc10-418f-ab0e-de860f6a5f49 req-120b04a1-e248-4976-9dec-d9312c01faef service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Received event network-vif-deleted-5585d129-e3b0-4025-8de6-1a1c14bdadc5 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1306.502810] env[62974]: INFO nova.compute.manager [req-92da049b-bc10-418f-ab0e-de860f6a5f49 req-120b04a1-e248-4976-9dec-d9312c01faef service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Neutron deleted interface 5585d129-e3b0-4025-8de6-1a1c14bdadc5; detaching it from the instance and deleting it from the info cache [ 1306.502810] env[62974]: DEBUG nova.network.neutron [req-92da049b-bc10-418f-ab0e-de860f6a5f49 req-120b04a1-e248-4976-9dec-d9312c01faef service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.967869] env[62974]: DEBUG nova.network.neutron [-] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.007895] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-374a8cac-3f02-472e-a309-0bcaaa4abe19 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.021111] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf9059a-b77f-4684-a5a9-4fbe024a7d36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.050995] env[62974]: DEBUG nova.compute.manager [req-92da049b-bc10-418f-ab0e-de860f6a5f49 req-120b04a1-e248-4976-9dec-d9312c01faef service nova] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Detach interface failed, port_id=5585d129-e3b0-4025-8de6-1a1c14bdadc5, reason: Instance e71134bd-23a1-4cc3-9e85-e8b6054be6d5 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1307.471600] env[62974]: INFO nova.compute.manager [-] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Took 1.75 seconds to deallocate network for instance. 
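Note: the detach/terminate sequence above (ReconfigVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is driven through oslo.vmware's session API, which submits a vSphere task and then polls it until vCenter reports completion; that polling is what produces the repeated "Task: {...} progress is N%" and "completed successfully" entries. A minimal sketch of that submit-then-wait pattern follows, closely modelled on the documented oslo.vmware usage; the vCenter host, credentials, and the way the VM reference is looked up are placeholders for illustration, not the code Nova actually runs.

# Sketch of the oslo.vmware "submit task, then poll it" pattern reflected in the
# PowerOffVM_Task / wait_for_task entries above. Host, credentials and the VM
# lookup below are placeholders.
from oslo_vmware import api
from oslo_vmware import vim_util

# 10 API retries, 0.5s task poll interval (same style as the oslo.vmware docs).
session = api.VMwareAPISession('vc.example.test', 'administrator@vsphere.local',
                               'password', 10, 0.5)

# Placeholder lookup: retrieve VirtualMachine objects and take the first one.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Submit the power-off task, then block while oslo.vmware polls it; each poll
# corresponds to one "_poll_task ... progress is N%" debug line in the log.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)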
[ 1307.978741] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.979081] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.979308] env[62974]: DEBUG nova.objects.instance [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lazy-loading 'resources' on Instance uuid e71134bd-23a1-4cc3-9e85-e8b6054be6d5 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1308.492974] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "df3a9d82-1563-4960-a69a-870b3d440081" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.493204] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "df3a9d82-1563-4960-a69a-870b3d440081" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.522627] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910d294b-d343-4589-af5e-c1c1e0d5ddd3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.530426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6bd4af-7db0-47b6-9561-737a4a7f4b4e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.562186] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9612516-31f6-4d05-b789-73f7c2ab320e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.569279] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b67782-e276-400b-a876-6f40c1217dcb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.582633] env[62974]: DEBUG nova.compute.provider_tree [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.995864] env[62974]: DEBUG nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1309.085901] env[62974]: DEBUG nova.scheduler.client.report [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1309.517432] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.590926] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.593427] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.076s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.595900] env[62974]: INFO nova.compute.claims [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1309.608881] env[62974]: INFO nova.scheduler.client.report [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Deleted allocations for instance e71134bd-23a1-4cc3-9e85-e8b6054be6d5 [ 1310.115539] env[62974]: DEBUG oslo_concurrency.lockutils [None req-3055a98a-68ee-4ae8-8c42-f70065f84329 tempest-AttachVolumeNegativeTest-604653782 tempest-AttachVolumeNegativeTest-604653782-project-member] Lock "e71134bd-23a1-4cc3-9e85-e8b6054be6d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.020s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1310.635119] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-b38efdea-982a-4f17-aa82-b6ab80c5d6e9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.642488] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e54fe1b-026f-437b-b73f-1a2b031f15d6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.671818] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a6a2d4-5678-4b3d-9fc3-80c8316a8741 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.678463] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7924bfb0-cc0f-497e-84f6-7c0665a16dfd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.691010] env[62974]: DEBUG nova.compute.provider_tree [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.194174] env[62974]: DEBUG nova.scheduler.client.report [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1311.698727] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.105s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.699221] env[62974]: DEBUG nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1312.204416] env[62974]: DEBUG nova.compute.utils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1312.205923] env[62974]: DEBUG nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1312.206139] env[62974]: DEBUG nova.network.neutron [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1312.249276] env[62974]: DEBUG nova.policy [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8155d54c630f4e23af762a7294aeca40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6c48c7303fa45ee856d937f85e96080', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1312.493851] env[62974]: DEBUG nova.network.neutron [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Successfully created port: dfce7e27-d5b3-43be-b3ab-52006b1587bd {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1312.709779] env[62974]: DEBUG nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1313.720294] env[62974]: DEBUG nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1313.748772] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1313.749678] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.749678] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1313.749678] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.749678] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1313.749678] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1313.749912] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1313.750582] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1313.750582] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1313.750582] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1313.750759] env[62974]: DEBUG nova.virt.hardware [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1313.751790] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d829bc9-50ff-482f-8df6-8232c444182a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.759426] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb27c93-3936-4c9c-98ae-d855652c02fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.862172] env[62974]: DEBUG nova.compute.manager [req-58f97bec-d21a-4fff-a120-7be5a0559030 req-355b005e-67fb-4f59-bbdd-e4265e9e95b0 service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Received event network-vif-plugged-dfce7e27-d5b3-43be-b3ab-52006b1587bd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1313.862523] env[62974]: DEBUG oslo_concurrency.lockutils [req-58f97bec-d21a-4fff-a120-7be5a0559030 req-355b005e-67fb-4f59-bbdd-e4265e9e95b0 service nova] Acquiring lock "df3a9d82-1563-4960-a69a-870b3d440081-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.862845] env[62974]: DEBUG oslo_concurrency.lockutils [req-58f97bec-d21a-4fff-a120-7be5a0559030 req-355b005e-67fb-4f59-bbdd-e4265e9e95b0 service nova] Lock "df3a9d82-1563-4960-a69a-870b3d440081-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.863212] env[62974]: DEBUG oslo_concurrency.lockutils [req-58f97bec-d21a-4fff-a120-7be5a0559030 req-355b005e-67fb-4f59-bbdd-e4265e9e95b0 service nova] Lock "df3a9d82-1563-4960-a69a-870b3d440081-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.863414] env[62974]: DEBUG nova.compute.manager [req-58f97bec-d21a-4fff-a120-7be5a0559030 req-355b005e-67fb-4f59-bbdd-e4265e9e95b0 service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] No waiting events found dispatching network-vif-plugged-dfce7e27-d5b3-43be-b3ab-52006b1587bd {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1313.863610] env[62974]: WARNING nova.compute.manager [req-58f97bec-d21a-4fff-a120-7be5a0559030 req-355b005e-67fb-4f59-bbdd-e4265e9e95b0 service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Received 
unexpected event network-vif-plugged-dfce7e27-d5b3-43be-b3ab-52006b1587bd for instance with vm_state building and task_state spawning. [ 1313.938181] env[62974]: DEBUG nova.network.neutron [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Successfully updated port: dfce7e27-d5b3-43be-b3ab-52006b1587bd {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.440657] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.440818] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.440962] env[62974]: DEBUG nova.network.neutron [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.970878] env[62974]: DEBUG nova.network.neutron [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.085042] env[62974]: DEBUG nova.network.neutron [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updating instance_info_cache with network_info: [{"id": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "address": "fa:16:3e:1e:2e:95", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfce7e27-d5", "ovs_interfaceid": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.588413] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.588746] env[62974]: DEBUG nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Instance network_info: |[{"id": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "address": "fa:16:3e:1e:2e:95", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfce7e27-d5", "ovs_interfaceid": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1315.589204] env[62974]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:2e:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfce7e27-d5b3-43be-b3ab-52006b1587bd', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1315.596566] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating folder: Project (e6c48c7303fa45ee856d937f85e96080). Parent ref: group-v535199. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1315.596827] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8002d9dd-8700-48ad-bae0-d535c0146807 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.607918] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created folder: Project (e6c48c7303fa45ee856d937f85e96080) in parent group-v535199. [ 1315.608105] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating folder: Instances. Parent ref: group-v535523. {{(pid=62974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1315.608307] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-440411d1-9886-4339-8236-a376631adca7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.617645] env[62974]: INFO nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created folder: Instances in parent group-v535523. [ 1315.617861] env[62974]: DEBUG oslo.service.loopingcall [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1315.618043] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1315.618229] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-695a26ce-2d91-4811-9ebc-442763f08d00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.636169] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1315.636169] env[62974]: value = "task-2655295" [ 1315.636169] env[62974]: _type = "Task" [ 1315.636169] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.643085] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655295, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.888622] env[62974]: DEBUG nova.compute.manager [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Received event network-changed-dfce7e27-d5b3-43be-b3ab-52006b1587bd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1315.888911] env[62974]: DEBUG nova.compute.manager [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Refreshing instance network info cache due to event network-changed-dfce7e27-d5b3-43be-b3ab-52006b1587bd. {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1315.889094] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] Acquiring lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.889241] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] Acquired lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.889387] env[62974]: DEBUG nova.network.neutron [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Refreshing network info cache for port dfce7e27-d5b3-43be-b3ab-52006b1587bd {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1316.145194] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655295, 'name': CreateVM_Task, 'duration_secs': 0.304388} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.145558] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1316.145993] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.146177] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.146486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1316.146729] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12147c5c-88f6-430c-a778-998effcedc24 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.151132] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1316.151132] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5292a2c2-9c90-4c6e-ffa8-2754caa7fc6a" [ 1316.151132] env[62974]: _type = "Task" [ 1316.151132] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.158504] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5292a2c2-9c90-4c6e-ffa8-2754caa7fc6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.580460] env[62974]: DEBUG nova.network.neutron [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updated VIF entry in instance network info cache for port dfce7e27-d5b3-43be-b3ab-52006b1587bd. 
{{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1316.580811] env[62974]: DEBUG nova.network.neutron [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updating instance_info_cache with network_info: [{"id": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "address": "fa:16:3e:1e:2e:95", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfce7e27-d5", "ovs_interfaceid": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.660947] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5292a2c2-9c90-4c6e-ffa8-2754caa7fc6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010843} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.661257] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.661508] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1316.661737] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.661883] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.662093] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1316.662350] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4ef5ddb-8959-419d-88d5-ade2c72f7332 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.670232] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1316.670386] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1316.671066] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98f4ea5f-b76b-4fe3-99bc-859b7f65cce0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.675832] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1316.675832] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5232e9f4-89a4-3191-e418-83b50d030d6e" [ 1316.675832] env[62974]: _type = "Task" [ 1316.675832] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.682876] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5232e9f4-89a4-3191-e418-83b50d030d6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.083456] env[62974]: DEBUG oslo_concurrency.lockutils [req-5c99d142-c0cc-433d-a039-aad830d51914 req-48cdba33-cae4-42f6-9703-937c16f76abe service nova] Releasing lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.186104] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5232e9f4-89a4-3191-e418-83b50d030d6e, 'name': SearchDatastore_Task, 'duration_secs': 0.007957} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.186840] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-681cdefc-1800-46d3-a328-eebaa3e37af7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.191998] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1317.191998] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]5223e6a4-a813-c022-7ba7-b1e2b637a69f" [ 1317.191998] env[62974]: _type = "Task" [ 1317.191998] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.199340] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5223e6a4-a813-c022-7ba7-b1e2b637a69f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.702693] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]5223e6a4-a813-c022-7ba7-b1e2b637a69f, 'name': SearchDatastore_Task, 'duration_secs': 0.009095} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.702965] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.703279] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] df3a9d82-1563-4960-a69a-870b3d440081/df3a9d82-1563-4960-a69a-870b3d440081.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1317.703569] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-682aa91f-930b-4e0d-89ae-8f6915f6c0fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.709783] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1317.709783] env[62974]: value = "task-2655297" [ 1317.709783] env[62974]: _type = "Task" [ 1317.709783] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.717092] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655297, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.219329] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655297, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.406266} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.219684] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] df3a9d82-1563-4960-a69a-870b3d440081/df3a9d82-1563-4960-a69a-870b3d440081.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1318.219761] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1318.219968] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c301a5b3-30a4-479e-be6f-cf2b14d8c165 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.226563] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1318.226563] env[62974]: value = "task-2655298" [ 1318.226563] env[62974]: _type = "Task" [ 1318.226563] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.234273] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.735865] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061971} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.736108] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1318.737614] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be0c78c-477d-4f3f-9fad-d8b5084fe7ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.758016] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] df3a9d82-1563-4960-a69a-870b3d440081/df3a9d82-1563-4960-a69a-870b3d440081.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1318.758269] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6cfa19b-cf04-468a-aa95-6cba37863f9f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.778062] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1318.778062] env[62974]: value = "task-2655299" [ 1318.778062] env[62974]: _type = "Task" [ 1318.778062] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.785286] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655299, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.288777] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655299, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.790215] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655299, 'name': ReconfigVM_Task, 'duration_secs': 0.536402} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.790488] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Reconfigured VM instance instance-00000077 to attach disk [datastore1] df3a9d82-1563-4960-a69a-870b3d440081/df3a9d82-1563-4960-a69a-870b3d440081.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1319.791138] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8763c193-2b99-4748-9b1d-066718720e5d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.797915] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1319.797915] env[62974]: value = "task-2655300" [ 1319.797915] env[62974]: _type = "Task" [ 1319.797915] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.809391] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655300, 'name': Rename_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.307282] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655300, 'name': Rename_Task, 'duration_secs': 0.134241} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.307646] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1320.307751] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e177e138-fbe1-4cae-aed0-4ffdad346715 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.313495] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1320.313495] env[62974]: value = "task-2655301" [ 1320.313495] env[62974]: _type = "Task" [ 1320.313495] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.320487] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655301, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.827028] env[62974]: DEBUG oslo_vmware.api [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655301, 'name': PowerOnVM_Task, 'duration_secs': 0.433052} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.827028] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1320.827028] env[62974]: INFO nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Took 7.10 seconds to spawn the instance on the hypervisor. [ 1320.827028] env[62974]: DEBUG nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1320.827028] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0441274-649d-4a42-94f0-2e7bd80c1cab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.346956] env[62974]: INFO nova.compute.manager [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Took 11.85 seconds to build instance. 
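The records above repeat one pattern: Nova invokes a vCenter method that returns a Task managed object (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), and oslo.vmware polls that task until vCenter reports completion, which is what produces the repeated "progress is N%" / "completed successfully" records. The Python sketch below only illustrates that invoke-then-poll pattern and is not Nova's code; it assumes session is an already-created oslo_vmware.api.VMwareAPISession (like the one established at driver start-up), and dc_ref, src_path and dst_path are placeholder arguments rather than values from this log.

    # Illustrative sketch of the invoke-then-poll pattern seen in the log above.
    # Assumes `session` is an existing oslo_vmware.api.VMwareAPISession; the paths
    # and datacenter reference are placeholders, not values taken from this log.
    def copy_cached_image(session, dc_ref, src_path, dst_path):
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim,
            'CopyVirtualDisk_Task',   # the same SOAP call logged above
            disk_mgr,
            sourceName=src_path,
            sourceDatacenter=dc_ref,
            destName=dst_path,
            destDatacenter=dc_ref,
        )
        # wait_for_task() polls the returned Task (the task-26553xx ids above) and
        # raises on an error state; the "progress is N%" DEBUG lines come from this loop.
        return session.wait_for_task(task)
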
[ 1321.849617] env[62974]: DEBUG oslo_concurrency.lockutils [None req-f6f5146a-6116-45c2-a929-4595166b1bfd tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "df3a9d82-1563-4960-a69a-870b3d440081" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.356s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.361788] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.362112] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.866063] env[62974]: DEBUG nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1323.388621] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.388989] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.390372] env[62974]: INFO nova.compute.claims [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1324.434191] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ac8545-b4dc-46d7-8a00-83a23e7b2006 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.441488] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a995d1-e8cc-47ef-bb21-e4b8d19d56d9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.471901] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3975d27-93cd-4f5c-b142-92b35a5a4990 
{{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.478390] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d97121-7145-404d-9cf1-fc5fecef897d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.490711] env[62974]: DEBUG nova.compute.provider_tree [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1324.994886] env[62974]: DEBUG nova.scheduler.client.report [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1325.499405] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.499907] env[62974]: DEBUG nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1326.004542] env[62974]: DEBUG nova.compute.utils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1326.005905] env[62974]: DEBUG nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1326.006079] env[62974]: DEBUG nova.network.neutron [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1326.059635] env[62974]: DEBUG nova.policy [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8155d54c630f4e23af762a7294aeca40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6c48c7303fa45ee856d937f85e96080', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1326.329249] env[62974]: DEBUG nova.network.neutron [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Successfully created port: 4453966e-e2d5-4d51-8463-36cddadfb48e {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1326.509806] env[62974]: DEBUG nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1327.520763] env[62974]: DEBUG nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1327.553282] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1327.553531] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.553763] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1327.553964] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.554124] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1327.554268] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1327.554471] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1327.554630] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1327.554828] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1327.554997] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1327.555186] env[62974]: DEBUG nova.virt.hardware [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1327.556054] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0438b4b-0ed5-44bf-9ed9-e19ca7a5528e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.564121] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a24f78-99fe-40ec-8b81-a1a515779f71 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.666519] env[62974]: DEBUG nova.compute.manager [req-546fdb89-e7ad-49d3-9fdc-8f8db5507c2a req-78f14a80-4136-4f67-b675-f567231a51fb service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Received event network-vif-plugged-4453966e-e2d5-4d51-8463-36cddadfb48e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1327.666737] env[62974]: DEBUG oslo_concurrency.lockutils [req-546fdb89-e7ad-49d3-9fdc-8f8db5507c2a req-78f14a80-4136-4f67-b675-f567231a51fb service nova] Acquiring lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.666952] env[62974]: DEBUG oslo_concurrency.lockutils [req-546fdb89-e7ad-49d3-9fdc-8f8db5507c2a req-78f14a80-4136-4f67-b675-f567231a51fb service nova] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.667128] env[62974]: DEBUG oslo_concurrency.lockutils [req-546fdb89-e7ad-49d3-9fdc-8f8db5507c2a req-78f14a80-4136-4f67-b675-f567231a51fb service nova] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.667294] env[62974]: DEBUG nova.compute.manager [req-546fdb89-e7ad-49d3-9fdc-8f8db5507c2a req-78f14a80-4136-4f67-b675-f567231a51fb service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] No waiting events found dispatching network-vif-plugged-4453966e-e2d5-4d51-8463-36cddadfb48e {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1327.667459] env[62974]: WARNING nova.compute.manager [req-546fdb89-e7ad-49d3-9fdc-8f8db5507c2a req-78f14a80-4136-4f67-b675-f567231a51fb service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Received 
unexpected event network-vif-plugged-4453966e-e2d5-4d51-8463-36cddadfb48e for instance with vm_state building and task_state spawning. [ 1327.744580] env[62974]: DEBUG nova.network.neutron [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Successfully updated port: 4453966e-e2d5-4d51-8463-36cddadfb48e {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1328.247458] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "refresh_cache-8ccf385f-5718-4a68-a54c-7aa1d820fa0f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.247607] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "refresh_cache-8ccf385f-5718-4a68-a54c-7aa1d820fa0f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.247769] env[62974]: DEBUG nova.network.neutron [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.778485] env[62974]: DEBUG nova.network.neutron [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1328.891045] env[62974]: DEBUG nova.network.neutron [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Updating instance_info_cache with network_info: [{"id": "4453966e-e2d5-4d51-8463-36cddadfb48e", "address": "fa:16:3e:98:c1:0e", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4453966e-e2", "ovs_interfaceid": "4453966e-e2d5-4d51-8463-36cddadfb48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.393341] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "refresh_cache-8ccf385f-5718-4a68-a54c-7aa1d820fa0f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.393696] env[62974]: DEBUG nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Instance network_info: |[{"id": "4453966e-e2d5-4d51-8463-36cddadfb48e", "address": "fa:16:3e:98:c1:0e", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4453966e-e2", "ovs_interfaceid": "4453966e-e2d5-4d51-8463-36cddadfb48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1329.394141] env[62974]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:c1:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4453966e-e2d5-4d51-8463-36cddadfb48e', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.401623] env[62974]: DEBUG oslo.service.loopingcall [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.401823] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.402439] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d39cd20e-a981-4ea9-b253-02841d908ee2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.422762] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.422762] env[62974]: value = "task-2655302" [ 1329.422762] env[62974]: _type = "Task" [ 1329.422762] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.429892] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655302, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.692322] env[62974]: DEBUG nova.compute.manager [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Received event network-changed-4453966e-e2d5-4d51-8463-36cddadfb48e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1329.692540] env[62974]: DEBUG nova.compute.manager [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Refreshing instance network info cache due to event network-changed-4453966e-e2d5-4d51-8463-36cddadfb48e. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1329.692867] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] Acquiring lock "refresh_cache-8ccf385f-5718-4a68-a54c-7aa1d820fa0f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.693125] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] Acquired lock "refresh_cache-8ccf385f-5718-4a68-a54c-7aa1d820fa0f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.693390] env[62974]: DEBUG nova.network.neutron [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Refreshing network info cache for port 4453966e-e2d5-4d51-8463-36cddadfb48e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1329.931914] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655302, 'name': CreateVM_Task, 'duration_secs': 0.329918} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.932275] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1329.932692] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.932884] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.933221] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1329.933459] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1348879e-3478-43f0-aed6-d7df4515c9d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.938188] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1329.938188] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b2ec6-fb34-8d75-a790-98c88ab9bd60" [ 1329.938188] env[62974]: _type = "Task" [ 1329.938188] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.945338] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b2ec6-fb34-8d75-a790-98c88ab9bd60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.365020] env[62974]: DEBUG nova.network.neutron [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Updated VIF entry in instance network info cache for port 4453966e-e2d5-4d51-8463-36cddadfb48e. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1330.365392] env[62974]: DEBUG nova.network.neutron [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Updating instance_info_cache with network_info: [{"id": "4453966e-e2d5-4d51-8463-36cddadfb48e", "address": "fa:16:3e:98:c1:0e", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4453966e-e2", "ovs_interfaceid": "4453966e-e2d5-4d51-8463-36cddadfb48e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.449858] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]526b2ec6-fb34-8d75-a790-98c88ab9bd60, 'name': SearchDatastore_Task, 'duration_secs': 0.011176} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.450157] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.450378] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1330.450607] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.450754] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.450956] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.451232] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd736273-e567-4db3-a9ec-38ce9a7f3600 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.459092] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.459232] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1330.459913] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aec306c-0f74-4f89-bf04-f2fdd0584f9a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.464754] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1330.464754] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52adf02c-c5b6-c50b-6f5f-bfd30f5302ce" [ 1330.464754] env[62974]: _type = "Task" [ 1330.464754] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.471726] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52adf02c-c5b6-c50b-6f5f-bfd30f5302ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.868249] env[62974]: DEBUG oslo_concurrency.lockutils [req-3a1db434-0553-44fc-ae38-ee455486ef89 req-9befa6c0-865e-4e6d-9db2-c31da5597a0f service nova] Releasing lock "refresh_cache-8ccf385f-5718-4a68-a54c-7aa1d820fa0f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.974606] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52adf02c-c5b6-c50b-6f5f-bfd30f5302ce, 'name': SearchDatastore_Task, 'duration_secs': 0.007906} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.975434] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d23cef4-f7c9-49b1-b981-8bdee0128867 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.980112] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1330.980112] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1493c-290c-6679-0f7c-7cbe08f067a5" [ 1330.980112] env[62974]: _type = "Task" [ 1330.980112] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.986962] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1493c-290c-6679-0f7c-7cbe08f067a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.491133] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f1493c-290c-6679-0f7c-7cbe08f067a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009028} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.491386] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.491647] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 8ccf385f-5718-4a68-a54c-7aa1d820fa0f/8ccf385f-5718-4a68-a54c-7aa1d820fa0f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1331.491901] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5eca37a5-1623-49fb-8de5-1b1575ab7f0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.498332] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1331.498332] env[62974]: value = "task-2655303" [ 1331.498332] env[62974]: _type = "Task" [ 1331.498332] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.505394] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.008379] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.413486} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.008752] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] 8ccf385f-5718-4a68-a54c-7aa1d820fa0f/8ccf385f-5718-4a68-a54c-7aa1d820fa0f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.008848] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.009111] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71ddcb50-52d6-4bbb-bb73-0d83c056064a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.015044] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1332.015044] env[62974]: value = "task-2655304" [ 1332.015044] env[62974]: _type = "Task" [ 1332.015044] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.021825] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655304, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.524871] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655304, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054148} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.525161] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1332.525917] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe09f63-bfa8-49b5-8476-277f61ae3c9b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.546670] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 8ccf385f-5718-4a68-a54c-7aa1d820fa0f/8ccf385f-5718-4a68-a54c-7aa1d820fa0f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1332.546885] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0b83c0e-85bc-4105-a316-cd0f3b1d4741 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.564909] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1332.564909] env[62974]: value = "task-2655305" [ 1332.564909] env[62974]: _type = "Task" [ 1332.564909] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.571851] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.074735] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655305, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.575525] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655305, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.076210] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655305, 'name': ReconfigVM_Task, 'duration_secs': 1.307209} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.076534] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 8ccf385f-5718-4a68-a54c-7aa1d820fa0f/8ccf385f-5718-4a68-a54c-7aa1d820fa0f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.077124] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd050a82-e25d-4f6a-89fb-5a90fd280458 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.084168] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1334.084168] env[62974]: value = "task-2655306" [ 1334.084168] env[62974]: _type = "Task" [ 1334.084168] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.092489] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655306, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.593827] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655306, 'name': Rename_Task, 'duration_secs': 0.144263} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.594115] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1334.595019] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1be0a5f4-c813-4610-814f-bba96b3e53b4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.600484] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1334.600484] env[62974]: value = "task-2655307" [ 1334.600484] env[62974]: _type = "Task" [ 1334.600484] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.607804] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655307, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.110442] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655307, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.610995] env[62974]: DEBUG oslo_vmware.api [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655307, 'name': PowerOnVM_Task, 'duration_secs': 0.640479} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.611268] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1335.611477] env[62974]: INFO nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Took 8.09 seconds to spawn the instance on the hypervisor. [ 1335.611650] env[62974]: DEBUG nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1335.612388] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276bd335-6644-4388-b866-d9a0fa953ab3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.130447] env[62974]: INFO nova.compute.manager [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Took 12.76 seconds to build instance. 
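The spawn sequence recorded above (copy of the cached image VMDK, ExtendVirtualDisk_Task, ReconfigVM_Task to attach the disk, Rename_Task, PowerOnVM_Task) is driven by the same wait-for-task pattern each time: a vCenter task is submitted, then polled via wait_for_task/_poll_task until it finishes, which is what produces the repeated "progress is N%" lines followed by "completed successfully" with a duration_secs value; the timestamps suggest a poll interval of roughly half a second here. The following is only a minimal illustrative sketch of that polling pattern, not the real oslo.vmware or vSphere API: get_task_info, its fields, and TaskFailed are hypothetical stand-ins.

    import time

    class TaskFailed(Exception):
        """Raised when the polled task reports an error state (hypothetical)."""

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        # Poll until the task finishes, mirroring the wait/poll pattern in the
        # log above: submit task -> poll progress -> "completed successfully".
        # `get_task_info` is a hypothetical callable returning an object with
        # .state ("running" | "success" | "error"), .progress, .error and
        # .duration_secs attributes.
        while True:
            info = get_task_info(task_id)
            if info.state == "success":
                print(f"Task {task_id} completed successfully "
                      f"in {info.duration_secs:.3f}s")
                return info
            if info.state == "error":
                raise TaskFailed(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(poll_interval)

Usage would look like wait_for_task(get_task_info, 'task-2655304'), where get_task_info wraps whatever client actually queries the task state; error details, session retries, and timeout handling in the real driver are outside the scope of this sketch.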
[ 1336.632773] env[62974]: DEBUG oslo_concurrency.lockutils [None req-a188415e-1181-403a-a11d-9a05c64aeb10 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.270s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.922057] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.922057] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.922320] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.922383] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.922513] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.924954] env[62974]: INFO nova.compute.manager [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Terminating instance [ 1337.429483] env[62974]: DEBUG nova.compute.manager [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1337.429884] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1337.430660] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd1d2b6-4ad0-4e96-839d-40a6abf5c03a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.438636] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1337.438856] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d17842eb-e27d-48fe-bc3f-e2df38ecc51e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.444319] env[62974]: DEBUG oslo_vmware.api [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1337.444319] env[62974]: value = "task-2655308" [ 1337.444319] env[62974]: _type = "Task" [ 1337.444319] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.452159] env[62974]: DEBUG oslo_vmware.api [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.953718] env[62974]: DEBUG oslo_vmware.api [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655308, 'name': PowerOffVM_Task, 'duration_secs': 0.205688} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.954014] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1337.954190] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1337.954433] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33993740-e382-47e0-a34c-fe7bec026917 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.020760] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1338.020956] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1338.021162] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleting the datastore file [datastore1] 8ccf385f-5718-4a68-a54c-7aa1d820fa0f {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1338.021415] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-493a7414-c6fb-46f6-83e2-9b47e1500bfe {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.027493] env[62974]: DEBUG oslo_vmware.api [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1338.027493] env[62974]: value = "task-2655310" [ 1338.027493] env[62974]: _type = "Task" [ 1338.027493] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.034472] env[62974]: DEBUG oslo_vmware.api [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.537715] env[62974]: DEBUG oslo_vmware.api [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200238} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.538113] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1338.538170] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1338.538327] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1338.538497] env[62974]: INFO nova.compute.manager [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1338.538726] env[62974]: DEBUG oslo.service.loopingcall [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1338.538917] env[62974]: DEBUG nova.compute.manager [-] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1338.539016] env[62974]: DEBUG nova.network.neutron [-] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1338.799050] env[62974]: DEBUG nova.compute.manager [req-92b20e0b-9a8b-40cc-8585-7f8d4f75451d req-902d22a3-4755-499a-b88b-3c467ed28185 service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Received event network-vif-deleted-4453966e-e2d5-4d51-8463-36cddadfb48e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1338.799050] env[62974]: INFO nova.compute.manager [req-92b20e0b-9a8b-40cc-8585-7f8d4f75451d req-902d22a3-4755-499a-b88b-3c467ed28185 service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Neutron deleted interface 4453966e-e2d5-4d51-8463-36cddadfb48e; detaching it from the instance and deleting it from the info cache [ 1338.799262] env[62974]: DEBUG nova.network.neutron [req-92b20e0b-9a8b-40cc-8585-7f8d4f75451d req-902d22a3-4755-499a-b88b-3c467ed28185 service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.283164] env[62974]: DEBUG nova.network.neutron [-] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1339.303906] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a6424f3-04ec-45af-a4d7-5bc18d54e0c5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.313701] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d7df42-2628-4374-a2d6-773629a1d7a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.337437] env[62974]: DEBUG nova.compute.manager [req-92b20e0b-9a8b-40cc-8585-7f8d4f75451d req-902d22a3-4755-499a-b88b-3c467ed28185 service nova] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Detach interface failed, port_id=4453966e-e2d5-4d51-8463-36cddadfb48e, reason: Instance 8ccf385f-5718-4a68-a54c-7aa1d820fa0f could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1339.785577] env[62974]: INFO nova.compute.manager [-] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Took 1.25 seconds to deallocate network for instance. [ 1340.292472] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.292738] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.292947] env[62974]: DEBUG nova.objects.instance [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'resources' on Instance uuid 8ccf385f-5718-4a68-a54c-7aa1d820fa0f {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.837806] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6238e519-60b0-448a-bab7-3944edef2463 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.845375] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf3f0b8-f572-4172-98a8-cc0ec5e7ccc9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.876205] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a59345-9986-4cea-a4a6-bfe30f88ba04 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.883036] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a235f1-f385-443c-b971-49b09c97c6ac {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.896141] env[62974]: DEBUG nova.compute.provider_tree [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.399234] env[62974]: DEBUG nova.scheduler.client.report [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1341.904606] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.922134] env[62974]: INFO nova.scheduler.client.report [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted allocations for instance 8ccf385f-5718-4a68-a54c-7aa1d820fa0f [ 1342.430794] env[62974]: DEBUG oslo_concurrency.lockutils [None req-2fc042fd-8321-4223-ae2b-7a0b6eb4e47c tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8ccf385f-5718-4a68-a54c-7aa1d820fa0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.508s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.701987] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1342.702191] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.210621] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.210621] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1343.210621] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Rebuilding the list of instances to heal {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1343.741931] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.742139] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.742264] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1343.742418] env[62974]: DEBUG nova.objects.instance [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lazy-loading 'info_cache' on Instance uuid df3a9d82-1563-4960-a69a-870b3d440081 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1343.925023] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "dac46c56-298c-4556-9e2b-4870f227508c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.925262] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "dac46c56-298c-4556-9e2b-4870f227508c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.428075] env[62974]: DEBUG nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1344.947502] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.947747] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.949846] env[62974]: INFO nova.compute.claims [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1345.431696] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updating instance_info_cache with network_info: [{"id": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "address": "fa:16:3e:1e:2e:95", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfce7e27-d5", "ovs_interfaceid": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.934037] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.934263] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1345.934474] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.934630] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.934794] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.934991] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.935159] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.935312] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.935446] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1345.935580] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.995711] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e99d241-61dc-46b2-bd2e-971e58a283ea {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.003671] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b842f2b0-6cc5-4251-984e-e39231625f0f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.034316] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e82476-b2dc-4427-ad0e-194b01b863e7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.042224] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26ef8d4-4e95-4836-a632-94dd212dbd00 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.055886] env[62974]: DEBUG nova.compute.provider_tree [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.438813] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.559168] env[62974]: DEBUG nova.scheduler.client.report [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1347.065572] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.118s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.066021] env[62974]: DEBUG nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1347.068758] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.630s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.068950] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.069083] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1347.070144] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578e6ede-c4b5-4574-9deb-ec4296ff7ce1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.078762] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fd403c-6e32-47e1-9523-73d02400ee5b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.092569] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1a2122-0071-4b90-b890-1c5a18286a7d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.099073] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a318cee-bc8b-4ebb-bf5f-7719c1cab996 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.127805] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181127MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1347.128062] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.128159] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.571596] env[62974]: DEBUG nova.compute.utils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 
1347.573040] env[62974]: DEBUG nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1347.573185] env[62974]: DEBUG nova.network.neutron [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1347.609685] env[62974]: DEBUG nova.policy [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8155d54c630f4e23af762a7294aeca40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6c48c7303fa45ee856d937f85e96080', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1347.859575] env[62974]: DEBUG nova.network.neutron [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Successfully created port: 0ffd15a0-a85d-4086-b31f-6de20e261801 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1348.076524] env[62974]: DEBUG nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1348.150200] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance df3a9d82-1563-4960-a69a-870b3d440081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.150382] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance dac46c56-298c-4556-9e2b-4870f227508c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.150575] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1348.150729] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1348.186195] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330f10f6-5f8d-438a-9764-dd402582c35c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.194711] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e68d7a-027b-4d20-9f36-a0be113fca7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.223429] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13607b4c-9212-4292-b416-939bb2fefdb7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.229852] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a87b470-0416-422b-9de0-8328613afb1a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.242290] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1348.745059] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1349.086682] env[62974]: DEBUG nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1349.113936] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1349.114218] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1349.114375] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1349.114554] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1349.114698] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1349.114845] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1349.115101] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1349.115267] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1349.115430] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1349.115590] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1349.115758] env[62974]: DEBUG nova.virt.hardware [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1349.116620] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa7c64e-f001-4456-95a9-6da77c4c2dc8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.124290] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99037e7a-db6b-472a-af51-5e2ccf9da0ed {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.218138] env[62974]: DEBUG nova.compute.manager [req-93fd2a33-94a7-42e8-b523-b0a317a04fdd req-fe943bb8-72c0-4289-a125-7bba8d8f7e46 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Received event network-vif-plugged-0ffd15a0-a85d-4086-b31f-6de20e261801 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1349.218350] env[62974]: DEBUG oslo_concurrency.lockutils [req-93fd2a33-94a7-42e8-b523-b0a317a04fdd req-fe943bb8-72c0-4289-a125-7bba8d8f7e46 service nova] Acquiring lock "dac46c56-298c-4556-9e2b-4870f227508c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.218557] env[62974]: DEBUG oslo_concurrency.lockutils [req-93fd2a33-94a7-42e8-b523-b0a317a04fdd req-fe943bb8-72c0-4289-a125-7bba8d8f7e46 service nova] Lock "dac46c56-298c-4556-9e2b-4870f227508c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.218707] env[62974]: DEBUG oslo_concurrency.lockutils [req-93fd2a33-94a7-42e8-b523-b0a317a04fdd req-fe943bb8-72c0-4289-a125-7bba8d8f7e46 service nova] Lock "dac46c56-298c-4556-9e2b-4870f227508c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.218873] env[62974]: DEBUG nova.compute.manager [req-93fd2a33-94a7-42e8-b523-b0a317a04fdd req-fe943bb8-72c0-4289-a125-7bba8d8f7e46 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] No waiting events found dispatching network-vif-plugged-0ffd15a0-a85d-4086-b31f-6de20e261801 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1349.219205] env[62974]: WARNING nova.compute.manager [req-93fd2a33-94a7-42e8-b523-b0a317a04fdd req-fe943bb8-72c0-4289-a125-7bba8d8f7e46 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Received 
unexpected event network-vif-plugged-0ffd15a0-a85d-4086-b31f-6de20e261801 for instance with vm_state building and task_state spawning. [ 1349.249259] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1349.249422] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.121s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.296373] env[62974]: DEBUG nova.network.neutron [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Successfully updated port: 0ffd15a0-a85d-4086-b31f-6de20e261801 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1349.799187] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "refresh_cache-dac46c56-298c-4556-9e2b-4870f227508c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.799505] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "refresh_cache-dac46c56-298c-4556-9e2b-4870f227508c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.799505] env[62974]: DEBUG nova.network.neutron [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1350.330468] env[62974]: DEBUG nova.network.neutron [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1350.448650] env[62974]: DEBUG nova.network.neutron [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Updating instance_info_cache with network_info: [{"id": "0ffd15a0-a85d-4086-b31f-6de20e261801", "address": "fa:16:3e:fb:49:33", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ffd15a0-a8", "ovs_interfaceid": "0ffd15a0-a85d-4086-b31f-6de20e261801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.951906] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "refresh_cache-dac46c56-298c-4556-9e2b-4870f227508c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.952274] env[62974]: DEBUG nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Instance network_info: |[{"id": "0ffd15a0-a85d-4086-b31f-6de20e261801", "address": "fa:16:3e:fb:49:33", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ffd15a0-a8", "ovs_interfaceid": "0ffd15a0-a85d-4086-b31f-6de20e261801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1350.952690] env[62974]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:49:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ffd15a0-a85d-4086-b31f-6de20e261801', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1350.960186] env[62974]: DEBUG oslo.service.loopingcall [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1350.960420] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1350.960641] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24168bef-a20a-4743-95bb-e889407205f6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.980240] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1350.980240] env[62974]: value = "task-2655311" [ 1350.980240] env[62974]: _type = "Task" [ 1350.980240] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.987417] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655311, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.243778] env[62974]: DEBUG nova.compute.manager [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Received event network-changed-0ffd15a0-a85d-4086-b31f-6de20e261801 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1351.244037] env[62974]: DEBUG nova.compute.manager [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Refreshing instance network info cache due to event network-changed-0ffd15a0-a85d-4086-b31f-6de20e261801. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1351.244254] env[62974]: DEBUG oslo_concurrency.lockutils [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] Acquiring lock "refresh_cache-dac46c56-298c-4556-9e2b-4870f227508c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.244393] env[62974]: DEBUG oslo_concurrency.lockutils [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] Acquired lock "refresh_cache-dac46c56-298c-4556-9e2b-4870f227508c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.244548] env[62974]: DEBUG nova.network.neutron [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Refreshing network info cache for port 0ffd15a0-a85d-4086-b31f-6de20e261801 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1351.490191] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655311, 'name': CreateVM_Task, 'duration_secs': 0.298008} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.490373] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1351.490992] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.491178] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.491490] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1351.491735] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0292ee97-aad6-49da-824f-37954b5a6ffc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.495789] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1351.495789] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c37e0-e383-d7b5-39e9-8e00dcd2e8f7" [ 1351.495789] env[62974]: _type = "Task" [ 1351.495789] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.502885] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c37e0-e383-d7b5-39e9-8e00dcd2e8f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.917848] env[62974]: DEBUG nova.network.neutron [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Updated VIF entry in instance network info cache for port 0ffd15a0-a85d-4086-b31f-6de20e261801. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.917968] env[62974]: DEBUG nova.network.neutron [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Updating instance_info_cache with network_info: [{"id": "0ffd15a0-a85d-4086-b31f-6de20e261801", "address": "fa:16:3e:fb:49:33", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ffd15a0-a8", "ovs_interfaceid": "0ffd15a0-a85d-4086-b31f-6de20e261801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.005582] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521c37e0-e383-d7b5-39e9-8e00dcd2e8f7, 'name': SearchDatastore_Task, 'duration_secs': 0.009709} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.005881] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.006127] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1352.006364] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.006509] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.006681] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1352.006927] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f221c73-ebc6-4215-a5c2-a34b62aa4e3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.015173] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1352.015346] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1352.016026] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69ca8e4f-2d67-41ae-bb20-7db970fd6aae {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.020473] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1352.020473] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a98193-f64d-6275-6822-50eebe3cceec" [ 1352.020473] env[62974]: _type = "Task" [ 1352.020473] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.027726] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a98193-f64d-6275-6822-50eebe3cceec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.420309] env[62974]: DEBUG oslo_concurrency.lockutils [req-30d5ba47-803e-4015-abbd-358dfda09f73 req-418c4fc7-0dda-47ff-883f-7e419949e8f9 service nova] Releasing lock "refresh_cache-dac46c56-298c-4556-9e2b-4870f227508c" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.531912] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a98193-f64d-6275-6822-50eebe3cceec, 'name': SearchDatastore_Task, 'duration_secs': 0.00836} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.532681] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ef23a7-b198-4a37-8b2f-644017496162 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.537657] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1352.537657] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d1790e-711c-9021-2900-39bf78bc619b" [ 1352.537657] env[62974]: _type = "Task" [ 1352.537657] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.544865] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d1790e-711c-9021-2900-39bf78bc619b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.048407] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52d1790e-711c-9021-2900-39bf78bc619b, 'name': SearchDatastore_Task, 'duration_secs': 0.011273} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.048671] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.048911] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] dac46c56-298c-4556-9e2b-4870f227508c/dac46c56-298c-4556-9e2b-4870f227508c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1353.049173] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff54732c-04a5-4ae0-8e43-3ded18b63a62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.055962] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1353.055962] env[62974]: value = "task-2655312" [ 1353.055962] env[62974]: _type = "Task" [ 1353.055962] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.063484] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655312, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.565960] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.408823} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.566251] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore1] dac46c56-298c-4556-9e2b-4870f227508c/dac46c56-298c-4556-9e2b-4870f227508c.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1353.566452] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1353.566688] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb63bccd-63e9-4daf-a48a-5a7ff608bef1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.573466] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1353.573466] env[62974]: value = "task-2655313" [ 1353.573466] env[62974]: _type = "Task" [ 1353.573466] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.580415] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655313, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.086014] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063452} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.086390] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1354.087377] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff670e09-8832-4298-81fa-95ed151b7d01 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.113549] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] dac46c56-298c-4556-9e2b-4870f227508c/dac46c56-298c-4556-9e2b-4870f227508c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1354.113549] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d0fc1a3-1386-400a-8824-4596deae802f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.131532] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1354.131532] env[62974]: value = "task-2655314" [ 1354.131532] env[62974]: _type = "Task" [ 1354.131532] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.138897] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655314, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.641843] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655314, 'name': ReconfigVM_Task, 'duration_secs': 0.312911} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.642132] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Reconfigured VM instance instance-00000079 to attach disk [datastore1] dac46c56-298c-4556-9e2b-4870f227508c/dac46c56-298c-4556-9e2b-4870f227508c.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1354.642710] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbffac0e-cda3-4dcc-8605-a84cb2703ae2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.648671] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1354.648671] env[62974]: value = "task-2655315" [ 1354.648671] env[62974]: _type = "Task" [ 1354.648671] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.656089] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655315, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.158695] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655315, 'name': Rename_Task, 'duration_secs': 0.157288} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.159093] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1355.159202] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27cb82ce-d519-4151-8da7-782d5bc04d7b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.165443] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1355.165443] env[62974]: value = "task-2655316" [ 1355.165443] env[62974]: _type = "Task" [ 1355.165443] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.172306] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655316, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.676094] env[62974]: DEBUG oslo_vmware.api [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655316, 'name': PowerOnVM_Task, 'duration_secs': 0.441279} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.676094] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1355.676418] env[62974]: INFO nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Took 6.59 seconds to spawn the instance on the hypervisor. [ 1355.676418] env[62974]: DEBUG nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1355.677112] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d544513-84bf-4501-bdfc-6f637cee4f21 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.196263] env[62974]: INFO nova.compute.manager [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Took 11.26 seconds to build instance. 
[ 1356.697912] env[62974]: DEBUG oslo_concurrency.lockutils [None req-c6e123b1-5728-45ec-8aea-f01ae7639c11 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "dac46c56-298c-4556-9e2b-4870f227508c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.772s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.042066] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "dac46c56-298c-4556-9e2b-4870f227508c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.042294] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "dac46c56-298c-4556-9e2b-4870f227508c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.042444] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "dac46c56-298c-4556-9e2b-4870f227508c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.042625] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "dac46c56-298c-4556-9e2b-4870f227508c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.042791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "dac46c56-298c-4556-9e2b-4870f227508c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.045269] env[62974]: INFO nova.compute.manager [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Terminating instance [ 1357.548642] env[62974]: DEBUG nova.compute.manager [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1357.548950] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1357.550151] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa3af12-ca5f-4c4b-93a9-8cf3ab95289d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.557724] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1357.557947] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-221d431a-8b0f-4e31-95bc-a34214907272 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.563571] env[62974]: DEBUG oslo_vmware.api [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1357.563571] env[62974]: value = "task-2655317" [ 1357.563571] env[62974]: _type = "Task" [ 1357.563571] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.571813] env[62974]: DEBUG oslo_vmware.api [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.073331] env[62974]: DEBUG oslo_vmware.api [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655317, 'name': PowerOffVM_Task, 'duration_secs': 0.220475} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.073590] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1358.073755] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1358.074040] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c7d10ce-6bb8-4e1f-8a10-738a19eac436 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.140335] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1358.140567] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1358.140753] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleting the datastore file [datastore1] dac46c56-298c-4556-9e2b-4870f227508c {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1358.141034] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-704982a4-bad9-4f95-8c8d-c62dc067f598 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.147377] env[62974]: DEBUG oslo_vmware.api [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1358.147377] env[62974]: value = "task-2655319" [ 1358.147377] env[62974]: _type = "Task" [ 1358.147377] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.154890] env[62974]: DEBUG oslo_vmware.api [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655319, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.657098] env[62974]: DEBUG oslo_vmware.api [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655319, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.301825} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.657488] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1358.657546] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1358.657706] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1358.657883] env[62974]: INFO nova.compute.manager [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1358.658143] env[62974]: DEBUG oslo.service.loopingcall [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1358.658340] env[62974]: DEBUG nova.compute.manager [-] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1358.658437] env[62974]: DEBUG nova.network.neutron [-] [instance: dac46c56-298c-4556-9e2b-4870f227508c] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1358.900380] env[62974]: DEBUG nova.compute.manager [req-85337983-18a2-4e48-9fe4-739b606b4d08 req-d737d0a4-0075-4482-a05e-b1a8204c0c1d service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Received event network-vif-deleted-0ffd15a0-a85d-4086-b31f-6de20e261801 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1358.900464] env[62974]: INFO nova.compute.manager [req-85337983-18a2-4e48-9fe4-739b606b4d08 req-d737d0a4-0075-4482-a05e-b1a8204c0c1d service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Neutron deleted interface 0ffd15a0-a85d-4086-b31f-6de20e261801; detaching it from the instance and deleting it from the info cache [ 1358.900680] env[62974]: DEBUG nova.network.neutron [req-85337983-18a2-4e48-9fe4-739b606b4d08 req-d737d0a4-0075-4482-a05e-b1a8204c0c1d service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.384539] env[62974]: DEBUG nova.network.neutron [-] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1359.402897] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00c30520-8b56-49ab-9a79-033b0ea57469 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.412265] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb85cf6-4a1e-42aa-9b39-42f2fe1ad661 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.436992] env[62974]: DEBUG nova.compute.manager [req-85337983-18a2-4e48-9fe4-739b606b4d08 req-d737d0a4-0075-4482-a05e-b1a8204c0c1d service nova] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Detach interface failed, port_id=0ffd15a0-a85d-4086-b31f-6de20e261801, reason: Instance dac46c56-298c-4556-9e2b-4870f227508c could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1359.888027] env[62974]: INFO nova.compute.manager [-] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Took 1.23 seconds to deallocate network for instance. [ 1360.394282] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.394546] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.394761] env[62974]: DEBUG nova.objects.instance [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'resources' on Instance uuid dac46c56-298c-4556-9e2b-4870f227508c {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.940569] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d288c4c-8197-4159-8d99-59e9451359fb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.948131] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de86624-bfe9-4da7-af33-503888ddf057 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.977481] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4ff615-b16f-4bca-85c0-53910b363da7 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.984275] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17373730-0be5-4d1c-85b9-4f8ad9570922 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.997472] env[62974]: DEBUG nova.compute.provider_tree [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.500286] env[62974]: DEBUG nova.scheduler.client.report [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1362.005492] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.027374] env[62974]: INFO nova.scheduler.client.report [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted allocations for instance dac46c56-298c-4556-9e2b-4870f227508c [ 1362.536856] env[62974]: DEBUG oslo_concurrency.lockutils [None req-7073fc06-ae56-49ee-b3b2-2d97c3107ae9 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "dac46c56-298c-4556-9e2b-4870f227508c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.495s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.951166] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "609d36b1-52e2-4747-8a6c-15ee41883174" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.951488] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "609d36b1-52e2-4747-8a6c-15ee41883174" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.453422] env[62974]: DEBUG nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1364.975157] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.975471] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.976899] env[62974]: INFO nova.compute.claims [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1366.021582] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1b793e-28b4-45a8-9bf7-8954a2cde510 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.028677] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b80ac18-5223-4d20-9e26-f1aeaa755601 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.058344] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17909936-79b0-4cd1-910b-459520a6e782 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.064748] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04859e17-e663-4e01-86fc-74b10566b9ca {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.076937] env[62974]: DEBUG nova.compute.provider_tree [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.581064] env[62974]: DEBUG nova.scheduler.client.report [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1367.085447] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.085962] env[62974]: DEBUG nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1367.591083] env[62974]: DEBUG nova.compute.utils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1367.592551] env[62974]: DEBUG nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1367.592726] env[62974]: DEBUG nova.network.neutron [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1367.638983] env[62974]: DEBUG nova.policy [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8155d54c630f4e23af762a7294aeca40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6c48c7303fa45ee856d937f85e96080', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1367.882984] env[62974]: DEBUG nova.network.neutron [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Successfully created port: 3f3974df-4488-4442-880c-08db0733d671 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1368.096536] env[62974]: DEBUG nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1369.106145] env[62974]: DEBUG nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1369.134547] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1369.134787] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.135076] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1369.135182] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.135338] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1369.135486] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1369.135690] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1369.135844] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1369.136015] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1369.136208] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1369.136380] env[62974]: DEBUG nova.virt.hardware [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1369.137336] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7443cad5-431b-4199-908d-cf33d897d249 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.145834] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de61db0-6a35-499d-b3ba-8ee72ed53a3f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.220151] env[62974]: DEBUG nova.compute.manager [req-2a30a392-bb45-4c41-ab27-c96ae36037ee req-4b1a9a58-0caa-4ca8-b560-2a9f85a9e2ea service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Received event network-vif-plugged-3f3974df-4488-4442-880c-08db0733d671 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1369.220382] env[62974]: DEBUG oslo_concurrency.lockutils [req-2a30a392-bb45-4c41-ab27-c96ae36037ee req-4b1a9a58-0caa-4ca8-b560-2a9f85a9e2ea service nova] Acquiring lock "609d36b1-52e2-4747-8a6c-15ee41883174-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.220585] env[62974]: DEBUG oslo_concurrency.lockutils [req-2a30a392-bb45-4c41-ab27-c96ae36037ee req-4b1a9a58-0caa-4ca8-b560-2a9f85a9e2ea service nova] Lock "609d36b1-52e2-4747-8a6c-15ee41883174-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.220749] env[62974]: DEBUG oslo_concurrency.lockutils [req-2a30a392-bb45-4c41-ab27-c96ae36037ee req-4b1a9a58-0caa-4ca8-b560-2a9f85a9e2ea service nova] Lock "609d36b1-52e2-4747-8a6c-15ee41883174-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.220920] env[62974]: DEBUG nova.compute.manager [req-2a30a392-bb45-4c41-ab27-c96ae36037ee req-4b1a9a58-0caa-4ca8-b560-2a9f85a9e2ea service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] No waiting events found dispatching network-vif-plugged-3f3974df-4488-4442-880c-08db0733d671 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1369.222386] env[62974]: WARNING nova.compute.manager [req-2a30a392-bb45-4c41-ab27-c96ae36037ee req-4b1a9a58-0caa-4ca8-b560-2a9f85a9e2ea service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Received 
unexpected event network-vif-plugged-3f3974df-4488-4442-880c-08db0733d671 for instance with vm_state building and task_state spawning. [ 1369.310653] env[62974]: DEBUG nova.network.neutron [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Successfully updated port: 3f3974df-4488-4442-880c-08db0733d671 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1369.813486] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "refresh_cache-609d36b1-52e2-4747-8a6c-15ee41883174" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.813635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "refresh_cache-609d36b1-52e2-4747-8a6c-15ee41883174" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.813782] env[62974]: DEBUG nova.network.neutron [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1370.344831] env[62974]: DEBUG nova.network.neutron [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1370.456688] env[62974]: DEBUG nova.network.neutron [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Updating instance_info_cache with network_info: [{"id": "3f3974df-4488-4442-880c-08db0733d671", "address": "fa:16:3e:72:9f:6e", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f3974df-44", "ovs_interfaceid": "3f3974df-4488-4442-880c-08db0733d671", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.959893] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "refresh_cache-609d36b1-52e2-4747-8a6c-15ee41883174" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.960249] env[62974]: DEBUG nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Instance network_info: |[{"id": "3f3974df-4488-4442-880c-08db0733d671", "address": "fa:16:3e:72:9f:6e", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f3974df-44", "ovs_interfaceid": "3f3974df-4488-4442-880c-08db0733d671", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1370.960687] env[62974]: DEBUG 
nova.virt.vmwareapi.vmops [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:9f:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f3974df-4488-4442-880c-08db0733d671', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1370.968043] env[62974]: DEBUG oslo.service.loopingcall [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.968248] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1370.968462] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9e6d42d-65ec-4ca8-89e8-992e25e92fd8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.987664] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1370.987664] env[62974]: value = "task-2655320" [ 1370.987664] env[62974]: _type = "Task" [ 1370.987664] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.995150] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655320, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.251552] env[62974]: DEBUG nova.compute.manager [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Received event network-changed-3f3974df-4488-4442-880c-08db0733d671 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1371.251668] env[62974]: DEBUG nova.compute.manager [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Refreshing instance network info cache due to event network-changed-3f3974df-4488-4442-880c-08db0733d671. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1371.251886] env[62974]: DEBUG oslo_concurrency.lockutils [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] Acquiring lock "refresh_cache-609d36b1-52e2-4747-8a6c-15ee41883174" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.252127] env[62974]: DEBUG oslo_concurrency.lockutils [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] Acquired lock "refresh_cache-609d36b1-52e2-4747-8a6c-15ee41883174" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.252368] env[62974]: DEBUG nova.network.neutron [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Refreshing network info cache for port 3f3974df-4488-4442-880c-08db0733d671 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.497932] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655320, 'name': CreateVM_Task, 'duration_secs': 0.296835} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.498382] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1371.498711] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.498872] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.499198] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1371.499439] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc6108bd-fd36-48a7-bd8b-3c8fbd12c5be {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.504064] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1371.504064] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5b303-9157-cbe5-5787-b557745f4d20" [ 1371.504064] env[62974]: _type = "Task" [ 1371.504064] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.511145] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5b303-9157-cbe5-5787-b557745f4d20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.924022] env[62974]: DEBUG nova.network.neutron [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Updated VIF entry in instance network info cache for port 3f3974df-4488-4442-880c-08db0733d671. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1371.924416] env[62974]: DEBUG nova.network.neutron [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Updating instance_info_cache with network_info: [{"id": "3f3974df-4488-4442-880c-08db0733d671", "address": "fa:16:3e:72:9f:6e", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f3974df-44", "ovs_interfaceid": "3f3974df-4488-4442-880c-08db0733d671", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.013977] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52c5b303-9157-cbe5-5787-b557745f4d20, 'name': SearchDatastore_Task, 'duration_secs': 0.011258} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.014331] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.014564] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1372.014791] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.014934] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.015128] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1372.015376] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d51c605e-edde-40a5-8651-9c626e365637 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.023905] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1372.024119] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1372.024820] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9747f17e-b499-4b17-a241-fb098104f245 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.029311] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1372.029311] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f62583-a18a-185c-9d7e-b57c6041cf2a" [ 1372.029311] env[62974]: _type = "Task" [ 1372.029311] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.036599] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f62583-a18a-185c-9d7e-b57c6041cf2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.427246] env[62974]: DEBUG oslo_concurrency.lockutils [req-d2669d06-7476-448d-b997-7ac5b68c1c75 req-aeff26b6-0df5-47f3-8d9a-3cbff47d1a3b service nova] Releasing lock "refresh_cache-609d36b1-52e2-4747-8a6c-15ee41883174" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.539815] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f62583-a18a-185c-9d7e-b57c6041cf2a, 'name': SearchDatastore_Task, 'duration_secs': 0.008205} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.540593] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2e85747-1cde-42cd-8f28-fcacedabc058 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.546021] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1372.546021] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ee7a3-6698-df56-4030-235b462a4dc8" [ 1372.546021] env[62974]: _type = "Task" [ 1372.546021] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.554364] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ee7a3-6698-df56-4030-235b462a4dc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.056707] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]521ee7a3-6698-df56-4030-235b462a4dc8, 'name': SearchDatastore_Task, 'duration_secs': 0.009902} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.056990] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.057254] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 609d36b1-52e2-4747-8a6c-15ee41883174/609d36b1-52e2-4747-8a6c-15ee41883174.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1373.057508] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20e62598-e0e5-4628-8a4a-34e7617b6a74 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.063890] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1373.063890] env[62974]: value = "task-2655321" [ 1373.063890] env[62974]: _type = "Task" [ 1373.063890] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.071614] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.573290] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464169} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.573622] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 609d36b1-52e2-4747-8a6c-15ee41883174/609d36b1-52e2-4747-8a6c-15ee41883174.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1373.573752] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1373.573976] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7cd588f8-a477-41d5-aaac-18f27c1cb88e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.580904] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1373.580904] env[62974]: value = "task-2655322" [ 1373.580904] env[62974]: _type = "Task" [ 1373.580904] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.587595] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655322, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.091054] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059014} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.091054] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1374.091590] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f86a2c-aa78-4fdd-9aba-efcd2daae99b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.112409] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 609d36b1-52e2-4747-8a6c-15ee41883174/609d36b1-52e2-4747-8a6c-15ee41883174.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.112620] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8de7d990-14ff-428a-8fb2-387856be3447 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.131097] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1374.131097] env[62974]: value = "task-2655323" [ 1374.131097] env[62974]: _type = "Task" [ 1374.131097] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.139552] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655323, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.641730] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655323, 'name': ReconfigVM_Task, 'duration_secs': 0.35767} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.642174] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 609d36b1-52e2-4747-8a6c-15ee41883174/609d36b1-52e2-4747-8a6c-15ee41883174.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1374.642654] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c5be3b3-03b6-483d-a60d-28b778a14c25 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.649298] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1374.649298] env[62974]: value = "task-2655324" [ 1374.649298] env[62974]: _type = "Task" [ 1374.649298] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.657106] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655324, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.158782] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655324, 'name': Rename_Task, 'duration_secs': 0.145174} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.159060] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1375.159374] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56a0cbdf-3447-4e31-b4f5-a182ab26eab2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.166021] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1375.166021] env[62974]: value = "task-2655325" [ 1375.166021] env[62974]: _type = "Task" [ 1375.166021] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.172915] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655325, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.676313] env[62974]: DEBUG oslo_vmware.api [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655325, 'name': PowerOnVM_Task, 'duration_secs': 0.458834} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.676697] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1375.676797] env[62974]: INFO nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Took 6.57 seconds to spawn the instance on the hypervisor. [ 1375.676972] env[62974]: DEBUG nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1375.677732] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51756209-f257-4ff6-a02e-666a14643c06 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.196277] env[62974]: INFO nova.compute.manager [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Took 11.24 seconds to build instance. 
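The build that completes above follows the step sequence visible in the preceding records: allocate the Neutron port, build the VIF info, create the VM shell (CreateVM_Task), locate the cached image on the datastore (SearchDatastore_Task), copy and extend the root disk (CopyVirtualDisk_Task / ExtendVirtualDisk_Task), attach it (ReconfigVM_Task), rename the VM, and power it on (PowerOnVM_Task). The Python sketch below is illustrative only: it uses hypothetical stub callables (create_port, create_vm, and so on), not the real nova.virt.vmwareapi internals, and exists purely to make the ordering and the "Took N seconds to build instance" timing explicit.

    import time

    # Hypothetical stand-ins for the vCenter tasks seen in the log; each returns
    # once the corresponding *_Task would have completed successfully.
    def create_port(instance_id):              return "3f3974df-4488-4442-880c-08db0733d671"
    def create_vm(instance_id, vif_info):      time.sleep(0.01)   # CreateVM_Task
    def locate_cached_image(image_id):         time.sleep(0.01)   # SearchDatastore_Task
    def copy_root_disk(image_id, instance_id): time.sleep(0.01)   # CopyVirtualDisk_Task
    def extend_root_disk(instance_id, size_kb): time.sleep(0.01)  # ExtendVirtualDisk_Task
    def attach_root_disk(instance_id):         time.sleep(0.01)   # ReconfigVM_Task
    def rename_vm(instance_id):                time.sleep(0.01)   # Rename_Task
    def power_on(instance_id):                 time.sleep(0.01)   # PowerOnVM_Task

    def build_instance(instance_id, image_id, root_gb):
        """Run the spawn steps in the order the log records them and report timing."""
        start = time.monotonic()
        port_id = create_port(instance_id)            # Neutron port allocation
        vif_info = {"iface_id": port_id, "vif_model": "vmxnet3"}
        create_vm(instance_id, vif_info)              # VM shell on the ESX host
        locate_cached_image(image_id)                 # devstack-image-cache_base lookup
        copy_root_disk(image_id, instance_id)         # cache -> <instance>/<instance>.vmdk
        extend_root_disk(instance_id, root_gb * 1024 * 1024)  # 1 GB root -> 1048576 KB
        attach_root_disk(instance_id)                 # reconfigure VM to attach the disk
        rename_vm(instance_id)
        power_on(instance_id)
        print("Took %.2f seconds to build instance." % (time.monotonic() - start))

    build_instance("609d36b1-52e2-4747-8a6c-15ee41883174",
                   "807f8582-499f-47ee-9d5b-755c9f39bc39", root_gb=1)

The second instance (8d4ef589-eef7-4f46-8fc1-172892dac43f) that starts in the records that follow goes through the same sequence.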
[ 1376.698333] env[62974]: DEBUG oslo_concurrency.lockutils [None req-08a3f37d-fbca-4da6-8921-7f365e8add12 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "609d36b1-52e2-4747-8a6c-15ee41883174" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.747s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.313586] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "8d4ef589-eef7-4f46-8fc1-172892dac43f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.313806] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.816070] env[62974]: DEBUG nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1378.335661] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.336019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.337491] env[62974]: INFO nova.compute.claims [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1379.391477] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4444cc42-e186-4759-910b-6eea7b28bc3d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.399087] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a59bfd1-f63d-46af-b30a-f71a0457b7a9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.428164] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-decb0a57-ebb2-4903-935a-b5dcc0564ff2 
{{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.434829] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca60767c-7ac1-4441-92a5-3d378537d1c1 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.448410] env[62974]: DEBUG nova.compute.provider_tree [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.952022] env[62974]: DEBUG nova.scheduler.client.report [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1380.457096] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.457627] env[62974]: DEBUG nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1380.963030] env[62974]: DEBUG nova.compute.utils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1380.964077] env[62974]: DEBUG nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Allocating IP information in the background. 
{{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1380.964283] env[62974]: DEBUG nova.network.neutron [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1381.010775] env[62974]: DEBUG nova.policy [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8155d54c630f4e23af762a7294aeca40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6c48c7303fa45ee856d937f85e96080', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1381.270105] env[62974]: DEBUG nova.network.neutron [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Successfully created port: 89c4fed2-f8d6-4ad0-99e2-e908969b6b3e {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1381.467935] env[62974]: DEBUG nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1382.478107] env[62974]: DEBUG nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1382.504278] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1382.504544] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.504696] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1382.504873] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.505028] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1382.505182] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1382.505430] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1382.505598] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1382.505786] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1382.505918] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1382.506102] env[62974]: DEBUG nova.virt.hardware [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1382.506965] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5e8218-17d2-4fc5-9128-3cda18a86f63 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.515356] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9376f836-f873-4bbd-9f1c-1e4def02240e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.598623] env[62974]: DEBUG nova.compute.manager [req-efc1a4d8-2ac4-4b63-bba3-956fc9b1e3b4 req-3108b58c-9003-4c87-82b5-362852f2e902 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Received event network-vif-plugged-89c4fed2-f8d6-4ad0-99e2-e908969b6b3e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1382.598895] env[62974]: DEBUG oslo_concurrency.lockutils [req-efc1a4d8-2ac4-4b63-bba3-956fc9b1e3b4 req-3108b58c-9003-4c87-82b5-362852f2e902 service nova] Acquiring lock "8d4ef589-eef7-4f46-8fc1-172892dac43f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.599059] env[62974]: DEBUG oslo_concurrency.lockutils [req-efc1a4d8-2ac4-4b63-bba3-956fc9b1e3b4 req-3108b58c-9003-4c87-82b5-362852f2e902 service nova] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.599231] env[62974]: DEBUG oslo_concurrency.lockutils [req-efc1a4d8-2ac4-4b63-bba3-956fc9b1e3b4 req-3108b58c-9003-4c87-82b5-362852f2e902 service nova] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.599401] env[62974]: DEBUG nova.compute.manager [req-efc1a4d8-2ac4-4b63-bba3-956fc9b1e3b4 req-3108b58c-9003-4c87-82b5-362852f2e902 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] No waiting events found dispatching network-vif-plugged-89c4fed2-f8d6-4ad0-99e2-e908969b6b3e {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1382.599565] env[62974]: WARNING nova.compute.manager [req-efc1a4d8-2ac4-4b63-bba3-956fc9b1e3b4 req-3108b58c-9003-4c87-82b5-362852f2e902 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Received 
unexpected event network-vif-plugged-89c4fed2-f8d6-4ad0-99e2-e908969b6b3e for instance with vm_state building and task_state spawning. [ 1382.675873] env[62974]: DEBUG nova.network.neutron [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Successfully updated port: 89c4fed2-f8d6-4ad0-99e2-e908969b6b3e {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1383.179365] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "refresh_cache-8d4ef589-eef7-4f46-8fc1-172892dac43f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.179489] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "refresh_cache-8d4ef589-eef7-4f46-8fc1-172892dac43f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.179685] env[62974]: DEBUG nova.network.neutron [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1383.717475] env[62974]: DEBUG nova.network.neutron [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1383.838902] env[62974]: DEBUG nova.network.neutron [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Updating instance_info_cache with network_info: [{"id": "89c4fed2-f8d6-4ad0-99e2-e908969b6b3e", "address": "fa:16:3e:c1:d5:58", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c4fed2-f8", "ovs_interfaceid": "89c4fed2-f8d6-4ad0-99e2-e908969b6b3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.341259] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "refresh_cache-8d4ef589-eef7-4f46-8fc1-172892dac43f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.341575] env[62974]: DEBUG nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Instance network_info: |[{"id": "89c4fed2-f8d6-4ad0-99e2-e908969b6b3e", "address": "fa:16:3e:c1:d5:58", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c4fed2-f8", "ovs_interfaceid": "89c4fed2-f8d6-4ad0-99e2-e908969b6b3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1384.342025] env[62974]: DEBUG 
nova.virt.vmwareapi.vmops [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:d5:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89c4fed2-f8d6-4ad0-99e2-e908969b6b3e', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1384.349481] env[62974]: DEBUG oslo.service.loopingcall [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1384.349681] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1384.349900] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94016cf9-aae6-4a89-806f-7c892166f483 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.370710] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1384.370710] env[62974]: value = "task-2655326" [ 1384.370710] env[62974]: _type = "Task" [ 1384.370710] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.378470] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655326, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.624899] env[62974]: DEBUG nova.compute.manager [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Received event network-changed-89c4fed2-f8d6-4ad0-99e2-e908969b6b3e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1384.625111] env[62974]: DEBUG nova.compute.manager [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Refreshing instance network info cache due to event network-changed-89c4fed2-f8d6-4ad0-99e2-e908969b6b3e. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1384.625371] env[62974]: DEBUG oslo_concurrency.lockutils [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] Acquiring lock "refresh_cache-8d4ef589-eef7-4f46-8fc1-172892dac43f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.625521] env[62974]: DEBUG oslo_concurrency.lockutils [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] Acquired lock "refresh_cache-8d4ef589-eef7-4f46-8fc1-172892dac43f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.625685] env[62974]: DEBUG nova.network.neutron [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Refreshing network info cache for port 89c4fed2-f8d6-4ad0-99e2-e908969b6b3e {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1384.880875] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655326, 'name': CreateVM_Task, 'duration_secs': 0.332153} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.881255] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1384.881642] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.881805] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.882151] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1384.882406] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0867343d-3efd-4692-9c46-744879a46cdd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.886733] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1384.886733] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52338a0a-c239-63a5-c697-9899b2b256d6" [ 1384.886733] env[62974]: _type = "Task" [ 1384.886733] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.894267] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52338a0a-c239-63a5-c697-9899b2b256d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.294009] env[62974]: DEBUG nova.network.neutron [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Updated VIF entry in instance network info cache for port 89c4fed2-f8d6-4ad0-99e2-e908969b6b3e. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1385.294430] env[62974]: DEBUG nova.network.neutron [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Updating instance_info_cache with network_info: [{"id": "89c4fed2-f8d6-4ad0-99e2-e908969b6b3e", "address": "fa:16:3e:c1:d5:58", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c4fed2-f8", "ovs_interfaceid": "89c4fed2-f8d6-4ad0-99e2-e908969b6b3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.397080] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52338a0a-c239-63a5-c697-9899b2b256d6, 'name': SearchDatastore_Task, 'duration_secs': 0.01084} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.397368] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.397598] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1385.397823] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.397967] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.398158] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1385.398404] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55c48b0f-9106-4777-83c9-767ab1edc24e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.406818] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1385.406956] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1385.407635] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e668febb-aed5-403f-b783-55e385bbda65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.412745] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1385.412745] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]527e08a2-929c-2292-6f6d-8aba5e32bbc4" [ 1385.412745] env[62974]: _type = "Task" [ 1385.412745] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.420583] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527e08a2-929c-2292-6f6d-8aba5e32bbc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.797810] env[62974]: DEBUG oslo_concurrency.lockutils [req-0451bb2b-530b-4c76-9305-2dbd15ec2f8d req-20f0a959-fc52-44f0-8947-f0d37dbb07e3 service nova] Releasing lock "refresh_cache-8d4ef589-eef7-4f46-8fc1-172892dac43f" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.923048] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]527e08a2-929c-2292-6f6d-8aba5e32bbc4, 'name': SearchDatastore_Task, 'duration_secs': 0.007744} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.923397] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-749cab98-738c-4f75-96a3-4a3d1e7311ba {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.927927] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1385.927927] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52984d2b-e635-64c2-9f09-464bcf53206b" [ 1385.927927] env[62974]: _type = "Task" [ 1385.927927] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.935349] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52984d2b-e635-64c2-9f09-464bcf53206b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.438720] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52984d2b-e635-64c2-9f09-464bcf53206b, 'name': SearchDatastore_Task, 'duration_secs': 0.008285} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.438848] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.439110] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8d4ef589-eef7-4f46-8fc1-172892dac43f/8d4ef589-eef7-4f46-8fc1-172892dac43f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1386.439354] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c52b1b56-5857-4138-af85-7a4574009386 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.446104] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1386.446104] env[62974]: value = "task-2655327" [ 1386.446104] env[62974]: _type = "Task" [ 1386.446104] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.454475] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.955799] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421669} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.956205] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 8d4ef589-eef7-4f46-8fc1-172892dac43f/8d4ef589-eef7-4f46-8fc1-172892dac43f.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1386.956288] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1386.956509] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be1e7c8d-7ecb-492c-8a88-fff7ffd940ce {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.961957] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1386.961957] env[62974]: value = "task-2655328" [ 1386.961957] env[62974]: _type = "Task" [ 1386.961957] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.968673] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.471679] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0573} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.471945] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1387.472830] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cddcf88-a7b6-46d9-a132-80f9b8e111de {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.494029] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] 8d4ef589-eef7-4f46-8fc1-172892dac43f/8d4ef589-eef7-4f46-8fc1-172892dac43f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1387.494289] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0775788-eb01-48ff-8382-08955da11d07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.513258] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1387.513258] env[62974]: value = "task-2655329" [ 1387.513258] env[62974]: _type = "Task" [ 1387.513258] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.520531] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.023770] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655329, 'name': ReconfigVM_Task, 'duration_secs': 0.267763} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.024220] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Reconfigured VM instance instance-0000007b to attach disk [datastore2] 8d4ef589-eef7-4f46-8fc1-172892dac43f/8d4ef589-eef7-4f46-8fc1-172892dac43f.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1388.024694] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ceb2881-ddac-4532-9069-7eff9c34f67a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.030333] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1388.030333] env[62974]: value = "task-2655330" [ 1388.030333] env[62974]: _type = "Task" [ 1388.030333] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.038042] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655330, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.539841] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655330, 'name': Rename_Task, 'duration_secs': 0.135949} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.540126] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1388.540392] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-032e227f-32b5-48a4-b84b-cc27aa0fece9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.546332] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1388.546332] env[62974]: value = "task-2655331" [ 1388.546332] env[62974]: _type = "Task" [ 1388.546332] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.553282] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655331, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.056345] env[62974]: DEBUG oslo_vmware.api [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655331, 'name': PowerOnVM_Task, 'duration_secs': 0.422385} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.056738] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1389.056857] env[62974]: INFO nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Took 6.58 seconds to spawn the instance on the hypervisor. [ 1389.057052] env[62974]: DEBUG nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1389.057798] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b1cadb-8909-47de-93da-927d9e45e4f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.573502] env[62974]: INFO nova.compute.manager [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Took 11.25 seconds to build instance. 
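Editor's note: the entries above trace the vmwareapi spawn path for instance 8d4ef589-eef7-4f46-8fc1-172892dac43f: CreateVM_Task, SearchDatastore_Task against the devstack image cache, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task to attach the disk, Rename_Task and PowerOnVM_Task, each submitted through oslo_vmware.service and then polled ("progress is N%") until it completes. The following is only a minimal, self-contained sketch of that poll-until-done pattern; fetch_task_info and the TaskInfo container are hypothetical stand-ins, not the oslo.vmware API.

# Sketch of the task-polling pattern seen in the wait_for_task/_poll_task entries.
# NOT the real oslo_vmware.api: fetch_task_info and TaskInfo are illustrative only.
import time
from dataclasses import dataclass
from typing import Callable


@dataclass
class TaskInfo:
    state: str            # "queued" | "running" | "success" | "error"
    progress: int         # 0-100, as in the "progress is N%" log lines
    error: str | None = None


def wait_for_vc_task(fetch_task_info: Callable[[], TaskInfo],
                     poll_interval: float = 0.5,
                     timeout: float = 300.0) -> TaskInfo:
    """Poll a long-running task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(poll_interval)

Each VMware operation in the log follows this shape: submit, receive a task handle (e.g. task-2655326), poll at a fixed interval, and only proceed to the next step once the task reports completion with a duration_secs value.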
[ 1390.075163] env[62974]: DEBUG oslo_concurrency.lockutils [None req-450eb5ec-c0c8-4e06-8434-35d17b0df9fe tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.761s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.488646] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "8d4ef589-eef7-4f46-8fc1-172892dac43f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.488972] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.489272] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "8d4ef589-eef7-4f46-8fc1-172892dac43f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.489522] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.489772] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.493343] env[62974]: INFO nova.compute.manager [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Terminating instance [ 1390.999232] env[62974]: DEBUG nova.compute.manager [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1390.999458] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1391.000723] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dc65b9-cc46-47fb-bec4-8b4bf54d724e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.006520] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.008716] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.008967] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1391.009213] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.009349] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Cleaning up deleted instances with incomplete migration {{(pid=62974) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1391.010255] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceceabcc-a0d2-4d35-9271-f301e4b0c0c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.016394] env[62974]: DEBUG oslo_vmware.api [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1391.016394] env[62974]: value = "task-2655332" [ 1391.016394] env[62974]: _type = "Task" [ 1391.016394] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.024826] env[62974]: DEBUG oslo_vmware.api [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655332, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.513397] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.526672] env[62974]: DEBUG oslo_vmware.api [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655332, 'name': PowerOffVM_Task, 'duration_secs': 0.230341} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.526928] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1391.527104] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1391.527340] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51ad42bc-619a-4ba0-8692-946db8ff8f65 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.596776] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1391.596979] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1391.597171] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleting the datastore file [datastore2] 8d4ef589-eef7-4f46-8fc1-172892dac43f {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1391.597425] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8191a34-57e1-4578-b3d6-bb562cfea138 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.603674] env[62974]: DEBUG oslo_vmware.api [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1391.603674] env[62974]: value = "task-2655334" [ 1391.603674] env[62974]: _type = "Task" [ 1391.603674] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.611706] env[62974]: DEBUG oslo_vmware.api [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655334, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.113222] env[62974]: DEBUG oslo_vmware.api [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655334, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147135} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.113476] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1392.113657] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1392.113831] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1392.114010] env[62974]: INFO nova.compute.manager [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1392.114284] env[62974]: DEBUG oslo.service.loopingcall [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1392.114479] env[62974]: DEBUG nova.compute.manager [-] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1392.114572] env[62974]: DEBUG nova.network.neutron [-] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1392.578074] env[62974]: DEBUG nova.compute.manager [req-37807f2f-e7ed-4632-bf5e-19c5fdbce96f req-7ed7a5d3-24bd-4474-a975-f69bae80abdb service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Received event network-vif-deleted-89c4fed2-f8d6-4ad0-99e2-e908969b6b3e {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1392.578310] env[62974]: INFO nova.compute.manager [req-37807f2f-e7ed-4632-bf5e-19c5fdbce96f req-7ed7a5d3-24bd-4474-a975-f69bae80abdb service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Neutron deleted interface 89c4fed2-f8d6-4ad0-99e2-e908969b6b3e; detaching it from the instance and deleting it from the info cache [ 1392.578468] env[62974]: DEBUG nova.network.neutron [req-37807f2f-e7ed-4632-bf5e-19c5fdbce96f req-7ed7a5d3-24bd-4474-a975-f69bae80abdb service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.012390] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.058062] env[62974]: DEBUG nova.network.neutron [-] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.082034] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81d6933a-809a-453d-a7e1-916df7768630 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.093401] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed182493-1622-45ec-bb46-85ec6ad6ca0d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.119238] env[62974]: DEBUG nova.compute.manager [req-37807f2f-e7ed-4632-bf5e-19c5fdbce96f req-7ed7a5d3-24bd-4474-a975-f69bae80abdb service nova] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Detach interface failed, port_id=89c4fed2-f8d6-4ad0-99e2-e908969b6b3e, reason: Instance 8d4ef589-eef7-4f46-8fc1-172892dac43f could not be found. 
{{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1393.516225] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.516478] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.516644] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.516790] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1393.517901] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aceab62-f57c-4473-9c22-eac77642e520 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.525929] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9029428-af9a-4bef-a3ae-5003820cf461 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.540653] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d170c3-840b-4df6-848d-1a0ca2c410eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.546954] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6fb10a-06ba-4c5a-b03b-beef0cce558b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.576320] env[62974]: INFO nova.compute.manager [-] [instance: 8d4ef589-eef7-4f46-8fc1-172892dac43f] Took 1.46 seconds to deallocate network for instance. 
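Editor's note: the teardown above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is followed by network deallocation driven through an oslo.service loopingcall, with the external network-vif-deleted event handled even though the interface detach fails because the instance is already gone. The sketch below only illustrates the generic retry-wrapper idea behind "_deallocate_network_with_retries"; the deallocate callable and the retry parameters are assumptions for illustration, not Nova's actual implementation.

# Rough, hypothetical sketch of a "deallocate with retries" wrapper.
import logging
import time
from typing import Callable

LOG = logging.getLogger(__name__)


def deallocate_network_with_retries(deallocate: Callable[[], None],
                                    attempts: int = 3,
                                    base_delay: float = 1.0) -> None:
    """Retry a deallocation callable a few times with linear backoff."""
    for attempt in range(1, attempts + 1):
        try:
            deallocate()
            return
        except Exception:
            LOG.exception("deallocate attempt %d/%d failed", attempt, attempts)
            if attempt == attempts:
                raise
            time.sleep(base_delay * attempt)  # simple linear backoff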
[ 1393.576651] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181127MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1393.576788] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.576977] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.084710] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.632243] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance df3a9d82-1563-4960-a69a-870b3d440081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1394.632423] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 609d36b1-52e2-4747-8a6c-15ee41883174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1394.632550] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Instance 8d4ef589-eef7-4f46-8fc1-172892dac43f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1394.632730] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1394.632866] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1394.678584] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8198765-982c-4486-b33e-5e304807b116 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.687094] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5029e118-8af6-42ea-9b79-142742891716 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.715755] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6622b86f-f700-46a0-8047-ce617e8dc4ff {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.722440] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1a0a7e-504b-4c73-b5da-90bd20d548d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.734790] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.238399] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1395.744052] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1395.744052] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.167s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.744356] env[62974]: DEBUG oslo_concurrency.lockutils [None 
req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.660s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.744669] env[62974]: DEBUG nova.objects.instance [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'resources' on Instance uuid 8d4ef589-eef7-4f46-8fc1-172892dac43f {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1395.745786] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1395.745929] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Cleaning up deleted instances {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1396.254928] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] There are 24 instances to clean {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1396.255371] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: dac46c56-298c-4556-9e2b-4870f227508c] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1396.302566] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90c2cdf-a59b-434c-a1a7-3da39e972b9f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.311245] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb0dffc-6bf2-439b-8215-97976e1dac4d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.341109] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdff1eb-1226-440e-835d-7bcd6f1552f3 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.348427] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348acb49-1657-4954-9110-1f6522382f07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.361269] env[62974]: DEBUG nova.compute.provider_tree [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.761115] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 8ccf385f-5718-4a68-a54c-7aa1d820fa0f] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1396.864098] env[62974]: DEBUG nova.scheduler.client.report [None 
req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1397.264760] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 01d0c91c-1724-453c-8d83-8f9e77afcef1] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1397.368764] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.388102] env[62974]: INFO nova.scheduler.client.report [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted allocations for instance 8d4ef589-eef7-4f46-8fc1-172892dac43f [ 1397.768080] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: e71134bd-23a1-4cc3-9e85-e8b6054be6d5] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1397.895530] env[62974]: DEBUG oslo_concurrency.lockutils [None req-987bb6ed-2b59-4321-ba44-96e65d1097e2 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "8d4ef589-eef7-4f46-8fc1-172892dac43f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.406s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.124108] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "609d36b1-52e2-4747-8a6c-15ee41883174" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.124415] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "609d36b1-52e2-4747-8a6c-15ee41883174" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.124635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "609d36b1-52e2-4747-8a6c-15ee41883174-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.124815] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "609d36b1-52e2-4747-8a6c-15ee41883174-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.124981] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "609d36b1-52e2-4747-8a6c-15ee41883174-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.127157] env[62974]: INFO nova.compute.manager [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Terminating instance [ 1398.271863] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 21156bad-9fc2-4cd3-97f9-bd1d0a8bd6be] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1398.631014] env[62974]: DEBUG nova.compute.manager [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1398.631282] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1398.632201] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b569c12-49c2-4734-b3e9-f0b8c4956ec5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.640455] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1398.640680] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd281755-fe28-4d6e-a698-a4361410fa32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.647576] env[62974]: DEBUG oslo_vmware.api [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1398.647576] env[62974]: value = "task-2655335" [ 1398.647576] env[62974]: _type = "Task" [ 1398.647576] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.655137] env[62974]: DEBUG oslo_vmware.api [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655335, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.775650] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 9450a3f2-4b2b-4022-842f-f24a8c470098] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1399.157748] env[62974]: DEBUG oslo_vmware.api [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655335, 'name': PowerOffVM_Task, 'duration_secs': 0.179065} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.157969] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1399.158110] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1399.158370] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ee67946-ad28-4c1e-b8ed-0a731002b2d2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.279659] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: a94cb966-5304-4484-8639-899d7211e8b6] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1399.351823] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1399.352071] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1399.352249] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleting the datastore file [datastore2] 609d36b1-52e2-4747-8a6c-15ee41883174 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1399.352505] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbe80309-fcf7-4bb8-ac26-1d26448083eb {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.359374] env[62974]: DEBUG oslo_vmware.api [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1399.359374] env[62974]: value = "task-2655337" [ 1399.359374] env[62974]: _type = "Task" [ 1399.359374] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.366841] env[62974]: DEBUG oslo_vmware.api [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655337, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.782814] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 255a1d01-e007-45e5-a2c9-798223f41b30] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1399.869517] env[62974]: DEBUG oslo_vmware.api [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199767} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.869960] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1399.870165] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1399.870338] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1399.870511] env[62974]: INFO nova.compute.manager [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1399.870733] env[62974]: DEBUG oslo.service.loopingcall [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.870913] env[62974]: DEBUG nova.compute.manager [-] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1399.871018] env[62974]: DEBUG nova.network.neutron [-] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1400.109904] env[62974]: DEBUG nova.compute.manager [req-250e055c-4495-4378-aad5-bccbc7b10f5b req-2a884511-f55c-489f-9ff3-10b0ff399171 service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Received event network-vif-deleted-3f3974df-4488-4442-880c-08db0733d671 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1400.110154] env[62974]: INFO nova.compute.manager [req-250e055c-4495-4378-aad5-bccbc7b10f5b req-2a884511-f55c-489f-9ff3-10b0ff399171 service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Neutron deleted interface 3f3974df-4488-4442-880c-08db0733d671; detaching it from the instance and deleting it from the info cache [ 1400.110438] env[62974]: DEBUG nova.network.neutron [req-250e055c-4495-4378-aad5-bccbc7b10f5b req-2a884511-f55c-489f-9ff3-10b0ff399171 service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.286326] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 007a5e28-7891-4327-ba39-bb9da8e32495] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1400.593688] env[62974]: DEBUG nova.network.neutron [-] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.612641] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4889174-d8a6-4d16-86a8-c6bbda78377d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.622229] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617a4f2b-8506-4c73-a365-63d2c30e59ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.645937] env[62974]: DEBUG nova.compute.manager [req-250e055c-4495-4378-aad5-bccbc7b10f5b req-2a884511-f55c-489f-9ff3-10b0ff399171 service nova] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Detach interface failed, port_id=3f3974df-4488-4442-880c-08db0733d671, reason: Instance 609d36b1-52e2-4747-8a6c-15ee41883174 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1400.790675] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 744a685d-845e-4818-abb5-c70056fd4cd0] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1401.096379] env[62974]: INFO nova.compute.manager [-] [instance: 609d36b1-52e2-4747-8a6c-15ee41883174] Took 1.23 seconds to deallocate network for instance. 
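Editor's note: the recurring "Acquiring lock ...", "Lock ... acquired ... :: waited N.NNNs" and 'Lock ... "released" ... :: held N.NNNs' DEBUG records in this log come from the oslo.concurrency lockutils wrapper around Nova's compute manager and resource tracker methods. The snippet below is only an illustrative sketch of that pattern under that assumption; the lock name, function, and dict-based "usage" are placeholders, not Nova's actual resource tracker code.

    from oslo_concurrency import lockutils

    # Illustrative sketch: the synchronized decorator serializes callers on a
    # named in-process lock and, at DEBUG level, logs the acquire (with wait
    # time) and release (with hold time) lines of the kind seen in this log.
    @lockutils.synchronized('compute_resources')
    def update_usage_example(usage, delta_vcpus):
        # Only one thread at a time executes this body.
        usage['vcpus'] = usage.get('vcpus', 0) + delta_vcpus
        return usage

    print(update_usage_example({}, 1))

With Python logging set to DEBUG, running the placeholder function produces acquire/release messages analogous to the lockutils.py:402/407/421 records above, with the decorated function's qualified name in place of the Nova method names.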
[ 1401.293730] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 521b463f-98f9-4365-b446-5de9af79f220] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1401.604944] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.605233] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.605497] env[62974]: DEBUG nova.objects.instance [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'resources' on Instance uuid 609d36b1-52e2-4747-8a6c-15ee41883174 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1401.796618] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: d7ca15a3-edd2-48a2-9ee0-5d2072f1310a] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1402.150054] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d96ac0-9a2f-40fa-8179-5f90a510b29b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.157854] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639582c7-b2b7-4159-9b5a-b99c26c0861f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.190027] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec0865a-cfe6-45d3-9a7d-ee6362126a62 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.197201] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e379b27a-f988-4301-8d64-2fba762fe12c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.210695] env[62974]: DEBUG nova.compute.provider_tree [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1402.299411] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 2a962aab-3057-43df-97f7-b63ce808fb90] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1402.714209] env[62974]: DEBUG nova.scheduler.client.report [None 
req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1402.802180] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: b2d46229-31a9-4be1-bd17-5411deb4944c] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1403.218563] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.613s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.238347] env[62974]: INFO nova.scheduler.client.report [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted allocations for instance 609d36b1-52e2-4747-8a6c-15ee41883174 [ 1403.305052] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c3801f86-5aaa-42cd-a6b2-1b72b77aa74c] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1403.745810] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b5b9a7f7-3af0-4793-bb87-d5e00c53a2d8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "609d36b1-52e2-4747-8a6c-15ee41883174" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.621s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.807705] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 72b0b643-7747-4dae-9d85-c8c6a573ce07] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1404.310937] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 220295bf-b021-4800-bc7e-a3dd311c747a] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1404.814649] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: a44cca2f-9286-490a-9013-1fea30984fa5] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1405.064646] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216" by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.064912] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.317993] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 4de11643-da0a-453f-b03e-ca19819f4f06] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1405.567841] env[62974]: DEBUG nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Starting instance... {{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1405.821437] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 11bd6a5d-9590-4aa3-aaf3-99d2ac394553] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1406.089946] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.090265] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.091927] env[62974]: INFO nova.compute.claims [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1406.325344] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: c90c9a6d-661f-4574-8a0d-7d8cacf8618d] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1406.829292] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: dca952df-dac9-4502-948b-24ac6fb939f9] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1407.140240] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ceb692-879d-4816-bd3f-022f16a1574f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.148418] 
env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf67d641-892d-4cc5-8fa2-a73f89368941 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.179330] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71b52ea-dc85-462c-b221-5f505bd6f612 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.186913] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad33c80-2ea2-4771-b992-983f372a16c9 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.201402] env[62974]: DEBUG nova.compute.provider_tree [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1407.332921] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 514e0f15-f27d-4fab-9107-b92884075420] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1407.704736] env[62974]: DEBUG nova.scheduler.client.report [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1407.836415] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: cf73422d-7f4b-4bae-9d69-de74d7211243] Instance has had 0 of 5 cleanup attempts {{(pid=62974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1408.209952] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.119s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.210454] env[62974]: DEBUG nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Start building networks asynchronously for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1408.715736] env[62974]: DEBUG nova.compute.utils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1408.717436] env[62974]: DEBUG nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1408.717542] env[62974]: DEBUG nova.network.neutron [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1408.763433] env[62974]: DEBUG nova.policy [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8155d54c630f4e23af762a7294aeca40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6c48c7303fa45ee856d937f85e96080', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1409.013528] env[62974]: DEBUG nova.network.neutron [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Successfully created port: 01677500-7f93-4c86-ae49-8074c3dee9b3 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1409.220315] env[62974]: DEBUG nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Start building block device mappings for instance. 
{{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1409.333764] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.334018] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.334203] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1409.334341] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Rebuilding the list of instances to heal {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1409.837521] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Skipping network cache update for instance because it is Building. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1409.863543] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.863693] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquired lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.863856] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Forcefully refreshing network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1409.864045] env[62974]: DEBUG nova.objects.instance [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lazy-loading 'info_cache' on Instance uuid df3a9d82-1563-4960-a69a-870b3d440081 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.230010] env[62974]: DEBUG nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1410.255481] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1410.255722] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.255874] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1410.256061] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.256208] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1410.256365] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1410.256597] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1410.256781] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1410.256956] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1410.257132] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1410.257304] env[62974]: DEBUG nova.virt.hardware [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1410.258156] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68df76cc-b310-4081-b73d-96c871af7bc0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.265807] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2e8224-68ac-46f9-940c-1d5b3436898a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.361647] env[62974]: DEBUG nova.compute.manager [req-3971aee8-0a65-4511-ab50-8f25fd80d13c req-8e17c05c-b6a8-492e-a3d3-f1391ef005ea service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Received event network-vif-plugged-01677500-7f93-4c86-ae49-8074c3dee9b3 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1410.361888] env[62974]: DEBUG oslo_concurrency.lockutils [req-3971aee8-0a65-4511-ab50-8f25fd80d13c req-8e17c05c-b6a8-492e-a3d3-f1391ef005ea service nova] Acquiring lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.362107] env[62974]: DEBUG oslo_concurrency.lockutils [req-3971aee8-0a65-4511-ab50-8f25fd80d13c req-8e17c05c-b6a8-492e-a3d3-f1391ef005ea service nova] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.362277] env[62974]: DEBUG oslo_concurrency.lockutils [req-3971aee8-0a65-4511-ab50-8f25fd80d13c req-8e17c05c-b6a8-492e-a3d3-f1391ef005ea service nova] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.362478] env[62974]: DEBUG nova.compute.manager [req-3971aee8-0a65-4511-ab50-8f25fd80d13c req-8e17c05c-b6a8-492e-a3d3-f1391ef005ea service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] No waiting events found dispatching network-vif-plugged-01677500-7f93-4c86-ae49-8074c3dee9b3 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1410.362592] env[62974]: WARNING nova.compute.manager [req-3971aee8-0a65-4511-ab50-8f25fd80d13c req-8e17c05c-b6a8-492e-a3d3-f1391ef005ea service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Received
unexpected event network-vif-plugged-01677500-7f93-4c86-ae49-8074c3dee9b3 for instance with vm_state building and task_state spawning. [ 1410.440094] env[62974]: DEBUG nova.network.neutron [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Successfully updated port: 01677500-7f93-4c86-ae49-8074c3dee9b3 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.945293] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "refresh_cache-59f423a5-e66a-4c05-9c0a-c1a0dbf19216" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.945293] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "refresh_cache-59f423a5-e66a-4c05-9c0a-c1a0dbf19216" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.945293] env[62974]: DEBUG nova.network.neutron [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1411.488360] env[62974]: DEBUG nova.network.neutron [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1411.635373] env[62974]: DEBUG nova.network.neutron [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updating instance_info_cache with network_info: [{"id": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "address": "fa:16:3e:1e:2e:95", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfce7e27-d5", "ovs_interfaceid": "dfce7e27-d5b3-43be-b3ab-52006b1587bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.658710] env[62974]: DEBUG nova.network.neutron [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Updating instance_info_cache with network_info: [{"id": "01677500-7f93-4c86-ae49-8074c3dee9b3", "address": "fa:16:3e:ab:1f:57", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01677500-7f", "ovs_interfaceid": "01677500-7f93-4c86-ae49-8074c3dee9b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.137812] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Releasing lock "refresh_cache-df3a9d82-1563-4960-a69a-870b3d440081" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.138242] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] [instance: 
df3a9d82-1563-4960-a69a-870b3d440081] Updated the network info_cache for instance {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1412.138304] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.138726] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.138726] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.138813] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.138926] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1412.160593] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "refresh_cache-59f423a5-e66a-4c05-9c0a-c1a0dbf19216" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.160870] env[62974]: DEBUG nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Instance network_info: |[{"id": "01677500-7f93-4c86-ae49-8074c3dee9b3", "address": "fa:16:3e:ab:1f:57", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01677500-7f", "ovs_interfaceid": "01677500-7f93-4c86-ae49-8074c3dee9b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1412.161317] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:1f:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01677500-7f93-4c86-ae49-8074c3dee9b3', 'vif_model': 'vmxnet3'}] {{(pid=62974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1412.168844] env[62974]: DEBUG oslo.service.loopingcall [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.169063] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1412.169287] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fcb50f9-1f5e-4646-bdd9-142432c87430 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.188979] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1412.188979] env[62974]: value = "task-2655338" [ 1412.188979] env[62974]: _type = "Task" [ 1412.188979] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.199356] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655338, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.388782] env[62974]: DEBUG nova.compute.manager [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Received event network-changed-01677500-7f93-4c86-ae49-8074c3dee9b3 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1412.388997] env[62974]: DEBUG nova.compute.manager [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Refreshing instance network info cache due to event network-changed-01677500-7f93-4c86-ae49-8074c3dee9b3. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1412.389236] env[62974]: DEBUG oslo_concurrency.lockutils [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] Acquiring lock "refresh_cache-59f423a5-e66a-4c05-9c0a-c1a0dbf19216" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.389472] env[62974]: DEBUG oslo_concurrency.lockutils [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] Acquired lock "refresh_cache-59f423a5-e66a-4c05-9c0a-c1a0dbf19216" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.389608] env[62974]: DEBUG nova.network.neutron [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Refreshing network info cache for port 01677500-7f93-4c86-ae49-8074c3dee9b3 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1412.698850] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655338, 'name': CreateVM_Task, 'duration_secs': 0.294224} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.699094] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1412.699635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.699803] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.700161] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1412.700407] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7de6d90-8539-48c4-bb9e-571528254f8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.704468] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1412.704468] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4edfb-7569-332b-39c6-0d806bbf7ab8" [ 1412.704468] env[62974]: _type = "Task" [ 1412.704468] env[62974]: } to complete. 
{{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.711302] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4edfb-7569-332b-39c6-0d806bbf7ab8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.061968] env[62974]: DEBUG nova.network.neutron [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Updated VIF entry in instance network info cache for port 01677500-7f93-4c86-ae49-8074c3dee9b3. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1413.062360] env[62974]: DEBUG nova.network.neutron [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Updating instance_info_cache with network_info: [{"id": "01677500-7f93-4c86-ae49-8074c3dee9b3", "address": "fa:16:3e:ab:1f:57", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01677500-7f", "ovs_interfaceid": "01677500-7f93-4c86-ae49-8074c3dee9b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.214825] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52f4edfb-7569-332b-39c6-0d806bbf7ab8, 'name': SearchDatastore_Task, 'duration_secs': 0.008567} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.215191] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.215324] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1413.215527] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.215688] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.215865] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.216121] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-426c168d-2ecd-4a57-89ba-84279bcbb459 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.223584] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.223748] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1413.224458] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3883c97-f23a-40ab-a666-019220f3c883 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.229177] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1413.229177] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd40fa-9bad-e41c-5f5a-ef610880c672" [ 1413.229177] env[62974]: _type = "Task" [ 1413.229177] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.236563] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd40fa-9bad-e41c-5f5a-ef610880c672, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.564692] env[62974]: DEBUG oslo_concurrency.lockutils [req-b2152eec-eefa-41c9-974a-ec5ff1c1a10a req-ed0c0f2b-5d59-4511-8c8d-af59e4d69b68 service nova] Releasing lock "refresh_cache-59f423a5-e66a-4c05-9c0a-c1a0dbf19216" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.739421] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52cd40fa-9bad-e41c-5f5a-ef610880c672, 'name': SearchDatastore_Task, 'duration_secs': 0.007871} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.740170] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42633f7-1542-4fc3-803a-5d23df82c0d0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.745075] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1413.745075] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b1f991-9a76-8bf6-bd7e-63020adbe9d1" [ 1413.745075] env[62974]: _type = "Task" [ 1413.745075] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.751974] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b1f991-9a76-8bf6-bd7e-63020adbe9d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.255426] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52b1f991-9a76-8bf6-bd7e-63020adbe9d1, 'name': SearchDatastore_Task, 'duration_secs': 0.008817} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.255724] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.255938] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 59f423a5-e66a-4c05-9c0a-c1a0dbf19216/59f423a5-e66a-4c05-9c0a-c1a0dbf19216.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1414.256229] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-baf48a5d-8a60-46ed-8814-f6084fba06d5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.262786] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1414.262786] env[62974]: value = "task-2655339" [ 1414.262786] env[62974]: _type = "Task" [ 1414.262786] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.270058] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.772994] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655339, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.436943} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.773261] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 59f423a5-e66a-4c05-9c0a-c1a0dbf19216/59f423a5-e66a-4c05-9c0a-c1a0dbf19216.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1414.773464] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1414.773705] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa57cb83-6d64-4e44-960e-8d03995055ec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.781106] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1414.781106] env[62974]: value = "task-2655340" [ 1414.781106] env[62974]: _type = "Task" [ 1414.781106] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.787920] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.290918] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060554} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.291215] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.291938] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc21fa4c-c1aa-4e28-b3c4-93df681ae032 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.312722] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 59f423a5-e66a-4c05-9c0a-c1a0dbf19216/59f423a5-e66a-4c05-9c0a-c1a0dbf19216.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.312967] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d77aa906-0e3c-460a-962f-5fb5e1c7ca56 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.331128] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1415.331128] env[62974]: value = "task-2655341" [ 1415.331128] env[62974]: _type = "Task" [ 1415.331128] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.338074] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655341, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.840666] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655341, 'name': ReconfigVM_Task, 'duration_secs': 0.291854} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.840931] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 59f423a5-e66a-4c05-9c0a-c1a0dbf19216/59f423a5-e66a-4c05-9c0a-c1a0dbf19216.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1415.841484] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff878164-328a-4b47-b8c7-8cfd59af4c31 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.847736] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1415.847736] env[62974]: value = "task-2655342" [ 1415.847736] env[62974]: _type = "Task" [ 1415.847736] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.855912] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655342, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.357514] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655342, 'name': Rename_Task, 'duration_secs': 0.166336} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.357821] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1416.358023] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd954968-029d-4a56-96d7-6726248a9593 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.363835] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1416.363835] env[62974]: value = "task-2655343" [ 1416.363835] env[62974]: _type = "Task" [ 1416.363835] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.370930] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655343, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.874041] env[62974]: DEBUG oslo_vmware.api [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655343, 'name': PowerOnVM_Task, 'duration_secs': 0.440664} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.874285] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1416.874476] env[62974]: INFO nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Took 6.64 seconds to spawn the instance on the hypervisor. [ 1416.874654] env[62974]: DEBUG nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1416.875417] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795e46fb-bce9-4797-a248-92dc04497727 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.392078] env[62974]: INFO nova.compute.manager [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Took 11.32 seconds to build instance. 
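The spawn entries above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same oslo.vmware pattern: invoke a vSphere *_Task method through the API session, then block on wait_for_task(), which polls the task and emits the "progress is N%" / "completed successfully ... duration_secs" lines seen here. A minimal sketch of that pattern follows; the helper name, credentials and vm_ref argument are placeholders for illustration, not values or code taken from Nova, and running it requires a reachable vCenter.

# Minimal sketch of the oslo.vmware task-wait pattern (assumed helper; not Nova's code).
from oslo_vmware import api as vmware_api

def power_on_vm(session, vm_ref):
    """Invoke PowerOnVM_Task on a VM and block until the task completes."""
    # 'vm_ref' is assumed to be a VirtualMachine managed object reference
    # obtained elsewhere (the PropertyCollector.RetrievePropertiesEx calls in
    # the log are doing that kind of lookup).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task's server-side state -- the source of the
    # "progress is 0%" and "completed successfully" entries -- and raises if
    # the task finishes in an error state.
    return session.wait_for_task(task)

# Placeholder connection values for illustration only.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)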
[ 1417.893308] env[62974]: DEBUG oslo_concurrency.lockutils [None req-aaf57f37-8a82-49e0-b497-d445ca55f50a tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.828s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.280995] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.281315] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.281529] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.281716] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.281886] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.284392] env[62974]: INFO nova.compute.manager [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Terminating instance [ 1418.788359] env[62974]: DEBUG nova.compute.manager [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Start destroying the instance on the hypervisor. 
{{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1418.788754] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1418.789521] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c66ee04-abe7-4ab3-8690-3e6c1d4af95a {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.797239] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1418.797466] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6c2f148-b8de-48bb-9ac5-baaf439f252e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.803243] env[62974]: DEBUG oslo_vmware.api [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1418.803243] env[62974]: value = "task-2655344" [ 1418.803243] env[62974]: _type = "Task" [ 1418.803243] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.810786] env[62974]: DEBUG oslo_vmware.api [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655344, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.313721] env[62974]: DEBUG oslo_vmware.api [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655344, 'name': PowerOffVM_Task, 'duration_secs': 0.170551} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.314097] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1419.314309] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1419.314592] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d42d140-2542-4fa0-97d4-c1407b6fcccd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.378413] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1419.378652] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Deleting contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1419.378806] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleting the datastore file [datastore2] 59f423a5-e66a-4c05-9c0a-c1a0dbf19216 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.379088] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b55319b3-a395-4b2c-81a7-06243e6f32a0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.384990] env[62974]: DEBUG oslo_vmware.api [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1419.384990] env[62974]: value = "task-2655346" [ 1419.384990] env[62974]: _type = "Task" [ 1419.384990] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.392663] env[62974]: DEBUG oslo_vmware.api [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.894226] env[62974]: DEBUG oslo_vmware.api [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135862} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.894611] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1419.894671] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1419.894828] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1419.894998] env[62974]: INFO nova.compute.manager [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1419.895261] env[62974]: DEBUG oslo.service.loopingcall [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1419.895458] env[62974]: DEBUG nova.compute.manager [-] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1419.895566] env[62974]: DEBUG nova.network.neutron [-] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1420.162966] env[62974]: DEBUG nova.compute.manager [req-2ba0c0f1-bf14-4235-ab6f-ec968f95cf42 req-b4c5b53a-c5b8-499c-9239-97fc965d7a32 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Received event network-vif-deleted-01677500-7f93-4c86-ae49-8074c3dee9b3 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1420.163379] env[62974]: INFO nova.compute.manager [req-2ba0c0f1-bf14-4235-ab6f-ec968f95cf42 req-b4c5b53a-c5b8-499c-9239-97fc965d7a32 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Neutron deleted interface 01677500-7f93-4c86-ae49-8074c3dee9b3; detaching it from the instance and deleting it from the info cache [ 1420.163455] env[62974]: DEBUG nova.network.neutron [req-2ba0c0f1-bf14-4235-ab6f-ec968f95cf42 req-b4c5b53a-c5b8-499c-9239-97fc965d7a32 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.640211] env[62974]: DEBUG nova.network.neutron [-] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1420.665603] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-417936f5-a8e2-434f-bd9d-201ecd6ba829 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.677010] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92245471-42ee-42c6-bb5d-c6bc99e76d05 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.702638] env[62974]: DEBUG nova.compute.manager [req-2ba0c0f1-bf14-4235-ab6f-ec968f95cf42 req-b4c5b53a-c5b8-499c-9239-97fc965d7a32 service nova] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Detach interface failed, port_id=01677500-7f93-4c86-ae49-8074c3dee9b3, reason: Instance 59f423a5-e66a-4c05-9c0a-c1a0dbf19216 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1421.143080] env[62974]: INFO nova.compute.manager [-] [instance: 59f423a5-e66a-4c05-9c0a-c1a0dbf19216] Took 1.25 seconds to deallocate network for instance. [ 1421.649550] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.649837] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.650083] env[62974]: DEBUG nova.objects.instance [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'resources' on Instance uuid 59f423a5-e66a-4c05-9c0a-c1a0dbf19216 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1422.169954] env[62974]: DEBUG nova.scheduler.client.report [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Refreshing inventories for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1422.184255] env[62974]: DEBUG nova.scheduler.client.report [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Updating ProviderTree inventory for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1422.184506] env[62974]: DEBUG nova.compute.provider_tree [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Updating inventory in ProviderTree for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1422.195138] env[62974]: DEBUG nova.scheduler.client.report [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Refreshing aggregate associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, aggregates: None {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1422.211384] env[62974]: DEBUG nova.scheduler.client.report [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Refreshing trait associations for resource provider bd3bd9ae-180c-41cf-831e-3dd3892efa18, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1422.245818] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056e19be-e059-4823-8d3f-ae8fe17afa1e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.253094] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd28f0c-a40f-466b-a9f3-5981b6b542ef {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.282681] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d930c7-8259-43a2-af96-489fcd9d4665 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.289624] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e32310b-a5fa-4351-973f-26dd3f36755d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.302134] env[62974]: DEBUG nova.compute.provider_tree [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1422.804923] env[62974]: DEBUG nova.scheduler.client.report [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1423.310970] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.330915] env[62974]: INFO nova.scheduler.client.report [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted allocations for instance 59f423a5-e66a-4c05-9c0a-c1a0dbf19216 [ 1423.838858] env[62974]: DEBUG oslo_concurrency.lockutils [None req-fd684c90-3556-40ea-bb6d-58bb58ba75ae tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "59f423a5-e66a-4c05-9c0a-c1a0dbf19216" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.557s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.204326] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.204682] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.707187] env[62974]: DEBUG nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Starting instance... 
{{(pid=62974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1426.229237] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.229516] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.230866] env[62974]: INFO nova.compute.claims [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1427.274501] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934b1031-68e0-48b8-8c0c-3bd2cc271cec {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.281679] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe6495-af7e-420c-a0f2-a677eb340731 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.310184] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008921d5-aa1a-44eb-b305-effb39436888 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.316726] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4c162a-bdcf-4799-b9c8-0d5bb20dacf4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.329224] env[62974]: DEBUG nova.compute.provider_tree [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.832576] env[62974]: DEBUG nova.scheduler.client.report [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1428.337461] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.108s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.337969] env[62974]: DEBUG nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Start building networks asynchronously for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1428.842664] env[62974]: DEBUG nova.compute.utils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Using /dev/sd instead of None {{(pid=62974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1428.844052] env[62974]: DEBUG nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Allocating IP information in the background. {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1428.844227] env[62974]: DEBUG nova.network.neutron [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] allocate_for_instance() {{(pid=62974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1428.890285] env[62974]: DEBUG nova.policy [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8155d54c630f4e23af762a7294aeca40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6c48c7303fa45ee856d937f85e96080', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62974) authorize /opt/stack/nova/nova/policy.py:192}} [ 1429.149407] env[62974]: DEBUG nova.network.neutron [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Successfully created port: 7f40b09d-0576-4af9-87a9-c0eb42af3847 {{(pid=62974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1429.347975] env[62974]: DEBUG nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Start building block device mappings for instance. {{(pid=62974) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1430.358486] env[62974]: DEBUG nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Start spawning the instance on the hypervisor. 
{{(pid=62974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1431.033842] env[62974]: DEBUG nova.network.neutron [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Successfully updated port: 7f40b09d-0576-4af9-87a9-c0eb42af3847 {{(pid=62974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1431.037344] env[62974]: DEBUG nova.compute.manager [req-fee04ad0-1af1-4245-aa09-c513e68866ee req-87f4bf05-b6fc-4875-a889-62671538ca0f service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Received event network-vif-plugged-7f40b09d-0576-4af9-87a9-c0eb42af3847 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1431.037344] env[62974]: DEBUG oslo_concurrency.lockutils [req-fee04ad0-1af1-4245-aa09-c513e68866ee req-87f4bf05-b6fc-4875-a889-62671538ca0f service nova] Acquiring lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.037344] env[62974]: DEBUG oslo_concurrency.lockutils [req-fee04ad0-1af1-4245-aa09-c513e68866ee req-87f4bf05-b6fc-4875-a889-62671538ca0f service nova] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.037344] env[62974]: DEBUG oslo_concurrency.lockutils [req-fee04ad0-1af1-4245-aa09-c513e68866ee req-87f4bf05-b6fc-4875-a889-62671538ca0f service nova] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.037499] env[62974]: DEBUG nova.compute.manager [req-fee04ad0-1af1-4245-aa09-c513e68866ee req-87f4bf05-b6fc-4875-a889-62671538ca0f service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] No waiting events found dispatching network-vif-plugged-7f40b09d-0576-4af9-87a9-c0eb42af3847 {{(pid=62974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1431.037687] env[62974]: WARNING nova.compute.manager [req-fee04ad0-1af1-4245-aa09-c513e68866ee req-87f4bf05-b6fc-4875-a889-62671538ca0f service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Received unexpected event network-vif-plugged-7f40b09d-0576-4af9-87a9-c0eb42af3847 for instance with vm_state building and task_state spawning. 
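The Acquiring/Acquired/"released" lock triplets throughout these entries (refresh_cache-<uuid>, compute_resources, <uuid>-events) are produced by oslo.concurrency's lockutils. A minimal sketch of the same context-manager usage is below; the lock name is copied from the instance above purely for illustration, and this is not Nova's actual locking code.

# Minimal sketch of an oslo.concurrency lock (illustrative; not Nova's code).
from oslo_concurrency import lockutils

# Lock name taken from the instance UUID in the log entries above.
with lockutils.lock('refresh_cache-15226e5e-f6ae-418a-ab28-0e2d69cbe24d'):
    # Critical section: only one thread refreshes this instance's network
    # info cache at a time, matching the Acquiring/Acquired/Releasing lines.
    pass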
[ 1431.059968] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-19T03:48:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-19T03:48:33Z,direct_url=,disk_format='vmdk',id=807f8582-499f-47ee-9d5b-755c9f39bc39,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='37ee788d98c44826be80135caef4b658',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-19T03:48:34Z,virtual_size=,visibility=), allow threads: False {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1431.060239] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1431.060395] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image limits 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1431.060575] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Flavor pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1431.060720] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Image pref 0:0:0 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1431.060861] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1431.061072] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1431.061233] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1431.061396] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Got 1 possible topologies {{(pid=62974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1431.061553] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1431.061722] env[62974]: DEBUG nova.virt.hardware [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1431.062606] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0690fd-8093-4c20-ab0f-788a9e61c0da {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.070480] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728e809c-529b-42d5-af3c-ad0633b0d270 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.537336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "refresh_cache-15226e5e-f6ae-418a-ab28-0e2d69cbe24d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.537336] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "refresh_cache-15226e5e-f6ae-418a-ab28-0e2d69cbe24d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.537767] env[62974]: DEBUG nova.network.neutron [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Building network info cache for instance {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1432.068781] env[62974]: DEBUG nova.network.neutron [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Instance cache missing network info. 
{{(pid=62974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1432.188375] env[62974]: DEBUG nova.network.neutron [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Updating instance_info_cache with network_info: [{"id": "7f40b09d-0576-4af9-87a9-c0eb42af3847", "address": "fa:16:3e:c4:33:2c", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f40b09d-05", "ovs_interfaceid": "7f40b09d-0576-4af9-87a9-c0eb42af3847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.519883] env[62974]: DEBUG nova.compute.manager [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Received event network-changed-7f40b09d-0576-4af9-87a9-c0eb42af3847 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1432.520100] env[62974]: DEBUG nova.compute.manager [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Refreshing instance network info cache due to event network-changed-7f40b09d-0576-4af9-87a9-c0eb42af3847. 
{{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1432.520269] env[62974]: DEBUG oslo_concurrency.lockutils [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] Acquiring lock "refresh_cache-15226e5e-f6ae-418a-ab28-0e2d69cbe24d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.690698] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "refresh_cache-15226e5e-f6ae-418a-ab28-0e2d69cbe24d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.691039] env[62974]: DEBUG nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Instance network_info: |[{"id": "7f40b09d-0576-4af9-87a9-c0eb42af3847", "address": "fa:16:3e:c4:33:2c", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f40b09d-05", "ovs_interfaceid": "7f40b09d-0576-4af9-87a9-c0eb42af3847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1432.691364] env[62974]: DEBUG oslo_concurrency.lockutils [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] Acquired lock "refresh_cache-15226e5e-f6ae-418a-ab28-0e2d69cbe24d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.691542] env[62974]: DEBUG nova.network.neutron [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Refreshing network info cache for port 7f40b09d-0576-4af9-87a9-c0eb42af3847 {{(pid=62974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1432.693691] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:33:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f40b09d-0576-4af9-87a9-c0eb42af3847', 'vif_model': 'vmxnet3'}] {{(pid=62974) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1432.701114] env[62974]: DEBUG oslo.service.loopingcall [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.702012] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Creating VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1432.702251] env[62974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fbe2795-0640-4b5e-b539-21d77501ff81 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.721138] env[62974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1432.721138] env[62974]: value = "task-2655347" [ 1432.721138] env[62974]: _type = "Task" [ 1432.721138] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.728624] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655347, 'name': CreateVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.230801] env[62974]: DEBUG oslo_vmware.api [-] Task: {'id': task-2655347, 'name': CreateVM_Task, 'duration_secs': 0.305184} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.232885] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Created VM on the ESX host {{(pid=62974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1433.233794] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.233955] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.234270] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1433.234779] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-524f0555-9054-4c6a-bade-ca5b4455c8ab {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.238886] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1433.238886] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e83757-df68-e4ea-e50e-e9c8cfed88f9" [ 1433.238886] env[62974]: _type = "Task" [ 1433.238886] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.245940] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e83757-df68-e4ea-e50e-e9c8cfed88f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.375012] env[62974]: DEBUG nova.network.neutron [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Updated VIF entry in instance network info cache for port 7f40b09d-0576-4af9-87a9-c0eb42af3847. {{(pid=62974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1433.375364] env[62974]: DEBUG nova.network.neutron [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Updating instance_info_cache with network_info: [{"id": "7f40b09d-0576-4af9-87a9-c0eb42af3847", "address": "fa:16:3e:c4:33:2c", "network": {"id": "6fa37320-2b1d-423b-b1fd-55ff8ef6f923", "bridge": "br-int", "label": "tempest-ServersTestJSON-89771278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6c48c7303fa45ee856d937f85e96080", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f40b09d-05", "ovs_interfaceid": "7f40b09d-0576-4af9-87a9-c0eb42af3847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.748607] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52e83757-df68-e4ea-e50e-e9c8cfed88f9, 'name': SearchDatastore_Task, 'duration_secs': 0.01159} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.748909] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.749123] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Processing image 807f8582-499f-47ee-9d5b-755c9f39bc39 {{(pid=62974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1433.749349] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.749491] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.749664] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1433.749916] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a360aa4e-f109-4efb-9227-67c1303832cd {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.757781] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1433.757948] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1433.758617] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-044e9043-69b9-4987-85f6-f784df8e8bcc {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.763107] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1433.763107] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a0ecc4-f32c-00e1-83da-fa199db1f0c7" [ 1433.763107] env[62974]: _type = "Task" [ 1433.763107] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.770012] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a0ecc4-f32c-00e1-83da-fa199db1f0c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.877951] env[62974]: DEBUG oslo_concurrency.lockutils [req-0419ddcb-ee3a-4be5-83ee-81a6c2d38e62 req-91bf5af2-8916-4c95-adae-77bdb90727c7 service nova] Releasing lock "refresh_cache-15226e5e-f6ae-418a-ab28-0e2d69cbe24d" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.273530] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]52a0ecc4-f32c-00e1-83da-fa199db1f0c7, 'name': SearchDatastore_Task, 'duration_secs': 0.007924} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.274278] env[62974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ba85913-4a97-4ef7-99fd-29dbe94028f4 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.279709] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1434.279709] env[62974]: value = "session[524ad32a-e631-7d4d-08da-2ec507e74f84]528ded5f-8c0a-5615-9ea9-a193ca65eded" [ 1434.279709] env[62974]: _type = "Task" [ 1434.279709] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.286691] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528ded5f-8c0a-5615-9ea9-a193ca65eded, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.789559] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': session[524ad32a-e631-7d4d-08da-2ec507e74f84]528ded5f-8c0a-5615-9ea9-a193ca65eded, 'name': SearchDatastore_Task, 'duration_secs': 0.009606} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.789898] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk" {{(pid=62974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.790047] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 15226e5e-f6ae-418a-ab28-0e2d69cbe24d/15226e5e-f6ae-418a-ab28-0e2d69cbe24d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1434.790293] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-661deded-732c-4b8b-b6d5-2cc18822b720 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.796729] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1434.796729] env[62974]: value = "task-2655348" [ 1434.796729] env[62974]: _type = "Task" [ 1434.796729] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.803822] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.306238] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655348, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455926} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.306523] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/807f8582-499f-47ee-9d5b-755c9f39bc39/807f8582-499f-47ee-9d5b-755c9f39bc39.vmdk to [datastore2] 15226e5e-f6ae-418a-ab28-0e2d69cbe24d/15226e5e-f6ae-418a-ab28-0e2d69cbe24d.vmdk {{(pid=62974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.306688] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Extending root virtual disk to 1048576 {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1435.306933] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffb23acd-a2f9-41d2-9426-638877a8f897 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.312832] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1435.312832] env[62974]: value = "task-2655349" [ 1435.312832] env[62974]: _type = "Task" [ 1435.312832] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.319514] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655349, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.822550] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655349, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062402} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.822842] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Extended root virtual disk {{(pid=62974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1435.823517] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f422a3cf-1b44-489d-9a5a-8671815a302b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.844014] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 15226e5e-f6ae-418a-ab28-0e2d69cbe24d/15226e5e-f6ae-418a-ab28-0e2d69cbe24d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.844254] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adcacfc8-b892-49bf-bbd0-47ae1566de32 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.862329] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1435.862329] env[62974]: value = "task-2655350" [ 1435.862329] env[62974]: _type = "Task" [ 1435.862329] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.869570] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655350, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.372358] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655350, 'name': ReconfigVM_Task, 'duration_secs': 0.276859} completed successfully. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.372700] env[62974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 15226e5e-f6ae-418a-ab28-0e2d69cbe24d/15226e5e-f6ae-418a-ab28-0e2d69cbe24d.vmdk or device None with type sparse {{(pid=62974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1436.373355] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21e02315-7ed7-45ea-bf40-7a35eb7bcfa5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.379655] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1436.379655] env[62974]: value = "task-2655351" [ 1436.379655] env[62974]: _type = "Task" [ 1436.379655] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.389955] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655351, 'name': Rename_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.888967] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655351, 'name': Rename_Task, 'duration_secs': 0.172327} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.889272] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Powering on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.889534] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e72f229a-40ea-4e23-a45c-ee508c484203 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.895588] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1436.895588] env[62974]: value = "task-2655352" [ 1436.895588] env[62974]: _type = "Task" [ 1436.895588] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.902803] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655352, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.405458] env[62974]: DEBUG oslo_vmware.api [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655352, 'name': PowerOnVM_Task, 'duration_secs': 0.431207} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.405722] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Powered on the VM {{(pid=62974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.405910] env[62974]: INFO nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Took 7.05 seconds to spawn the instance on the hypervisor. [ 1437.406096] env[62974]: DEBUG nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1437.406827] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d8a45b-0a06-4e0f-8abb-aa67ca207025 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.923184] env[62974]: INFO nova.compute.manager [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Took 11.71 seconds to build instance. 
[ 1438.424758] env[62974]: DEBUG oslo_concurrency.lockutils [None req-1c34e179-c5c3-42fd-8696-64c7e1daa6ef tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.220s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.769312] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.769575] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.769752] env[62974]: DEBUG nova.compute.manager [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1438.770677] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901d1f79-5446-41de-9bcf-d96bc8ad3306 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.777666] env[62974]: DEBUG nova.compute.manager [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1438.778229] env[62974]: DEBUG nova.objects.instance [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'flavor' on Instance uuid 15226e5e-f6ae-418a-ab28-0e2d69cbe24d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1439.785124] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.785542] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b965ec0b-e4e0-43b0-9dc0-4f79d5714105 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.792766] env[62974]: DEBUG oslo_vmware.api [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 
1439.792766] env[62974]: value = "task-2655353" [ 1439.792766] env[62974]: _type = "Task" [ 1439.792766] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.801039] env[62974]: DEBUG oslo_vmware.api [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.301955] env[62974]: DEBUG oslo_vmware.api [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655353, 'name': PowerOffVM_Task, 'duration_secs': 0.177708} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.302233] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1440.302425] env[62974]: DEBUG nova.compute.manager [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Checking state {{(pid=62974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1440.303200] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4540da6-9539-4d70-a535-829deba8ddb5 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.815225] env[62974]: DEBUG oslo_concurrency.lockutils [None req-b7170dd0-1ea3-4607-a225-d2039a37e9b8 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.045s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.315287] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.315520] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.315811] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.316019] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.316191] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.318213] env[62974]: INFO nova.compute.manager [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Terminating instance [ 1441.822272] env[62974]: DEBUG nova.compute.manager [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1441.822710] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1441.823406] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9e434f-d1de-4860-8ddc-49898f05bfd6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.831042] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1441.831295] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92335f3e-f3e7-407a-b026-b3fe87551f51 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.905699] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1441.905969] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Deleting contents of the VM from datastore 
datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1441.906109] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleting the datastore file [datastore2] 15226e5e-f6ae-418a-ab28-0e2d69cbe24d {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1441.906368] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9ff4da5-8767-4c54-a33f-19bc985faa2c {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.912842] env[62974]: DEBUG oslo_vmware.api [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1441.912842] env[62974]: value = "task-2655355" [ 1441.912842] env[62974]: _type = "Task" [ 1441.912842] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.920327] env[62974]: DEBUG oslo_vmware.api [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.422373] env[62974]: DEBUG oslo_vmware.api [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156082} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.424459] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1442.424459] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Deleted contents of the VM from datastore datastore2 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1442.424459] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1442.424459] env[62974]: INFO nova.compute.manager [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1442.424459] env[62974]: DEBUG oslo.service.loopingcall [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1442.424459] env[62974]: DEBUG nova.compute.manager [-] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1442.424459] env[62974]: DEBUG nova.network.neutron [-] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1442.725764] env[62974]: DEBUG nova.compute.manager [req-d8519f38-cd6f-4713-b95e-36b0fb268961 req-3a8c8fdf-c2de-44c7-801a-85a8c5105593 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Received event network-vif-deleted-7f40b09d-0576-4af9-87a9-c0eb42af3847 {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1442.725861] env[62974]: INFO nova.compute.manager [req-d8519f38-cd6f-4713-b95e-36b0fb268961 req-3a8c8fdf-c2de-44c7-801a-85a8c5105593 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Neutron deleted interface 7f40b09d-0576-4af9-87a9-c0eb42af3847; detaching it from the instance and deleting it from the info cache [ 1442.725977] env[62974]: DEBUG nova.network.neutron [req-d8519f38-cd6f-4713-b95e-36b0fb268961 req-3a8c8fdf-c2de-44c7-801a-85a8c5105593 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.184945] env[62974]: DEBUG nova.network.neutron [-] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.228941] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aae2da47-d3d0-4232-be7c-cd0b75f5930e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.238909] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181daa86-8ff9-4ce7-926e-224dca05035e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.262086] env[62974]: DEBUG nova.compute.manager [req-d8519f38-cd6f-4713-b95e-36b0fb268961 req-3a8c8fdf-c2de-44c7-801a-85a8c5105593 service nova] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Detach interface failed, port_id=7f40b09d-0576-4af9-87a9-c0eb42af3847, reason: Instance 15226e5e-f6ae-418a-ab28-0e2d69cbe24d could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1443.687326] env[62974]: INFO nova.compute.manager [-] [instance: 15226e5e-f6ae-418a-ab28-0e2d69cbe24d] Took 1.26 seconds to deallocate network for instance. 
[ 1444.194051] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.194365] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.194550] env[62974]: DEBUG nova.objects.instance [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'resources' on Instance uuid 15226e5e-f6ae-418a-ab28-0e2d69cbe24d {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.741552] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adb7245-5b51-4fd6-87d2-18e45ea78d6e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.748769] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39680b3e-8750-4230-bfd8-abb4add8e27d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.778148] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d537d5c1-1d24-4578-9823-c4066ba11507 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.784738] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41bebf0-231e-458e-9f57-0aaca2176820 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.797397] env[62974]: DEBUG nova.compute.provider_tree [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1445.300012] env[62974]: DEBUG nova.scheduler.client.report [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1445.805442] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.826647] env[62974]: INFO nova.scheduler.client.report [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted allocations for instance 15226e5e-f6ae-418a-ab28-0e2d69cbe24d [ 1446.336919] env[62974]: DEBUG oslo_concurrency.lockutils [None req-65aff287-26b7-46b0-97de-c54970b1e1e3 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "15226e5e-f6ae-418a-ab28-0e2d69cbe24d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.021s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.023518] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "df3a9d82-1563-4960-a69a-870b3d440081" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.023839] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "df3a9d82-1563-4960-a69a-870b3d440081" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.024080] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "df3a9d82-1563-4960-a69a-870b3d440081-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.024272] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "df3a9d82-1563-4960-a69a-870b3d440081-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.024504] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "df3a9d82-1563-4960-a69a-870b3d440081-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.026587] env[62974]: INFO nova.compute.manager [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Terminating instance [ 1447.529888] env[62974]: DEBUG nova.compute.manager [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 
tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Start destroying the instance on the hypervisor. {{(pid=62974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1447.530444] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Destroying instance {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1447.531298] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e12b85-2e4c-4c18-ad86-f99facce7e10 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.539075] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Powering off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1447.539296] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cadf6900-90e1-4373-b81d-5d227339ad76 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.544458] env[62974]: DEBUG oslo_vmware.api [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){ [ 1447.544458] env[62974]: value = "task-2655356" [ 1447.544458] env[62974]: _type = "Task" [ 1447.544458] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.551441] env[62974]: DEBUG oslo_vmware.api [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.055845] env[62974]: DEBUG oslo_vmware.api [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655356, 'name': PowerOffVM_Task, 'duration_secs': 0.185429} completed successfully. 
[ 1448.055845] env[62974]: DEBUG oslo_vmware.api [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655356, 'name': PowerOffVM_Task, 'duration_secs': 0.185429} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1448.056113] env[62974]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Powered off the VM {{(pid=62974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1448.056283] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Unregistering the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1448.056514] env[62974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9ea6d51-116b-465d-bd99-296d46495c8d {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1448.119783] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Unregistered the VM {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1448.120045] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Deleting contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1448.120184] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleting the datastore file [datastore1] df3a9d82-1563-4960-a69a-870b3d440081 {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1448.120433] env[62974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aecc44ba-dc94-4ed2-a782-0db84f839454 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1448.126831] env[62974]: DEBUG oslo_vmware.api [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for the task: (returnval){
[ 1448.126831] env[62974]: value = "task-2655358"
[ 1448.126831] env[62974]: _type = "Task"
[ 1448.126831] env[62974]: } to complete. {{(pid=62974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1448.134257] env[62974]: DEBUG oslo_vmware.api [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655358, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1448.637223] env[62974]: DEBUG oslo_vmware.api [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Task: {'id': task-2655358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136188} completed successfully. {{(pid=62974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1448.637664] env[62974]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted the datastore file {{(pid=62974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1448.637664] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Deleted contents of the VM from datastore datastore1 {{(pid=62974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1448.637822] env[62974]: DEBUG nova.virt.vmwareapi.vmops [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Instance destroyed {{(pid=62974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1448.637991] env[62974]: INFO nova.compute.manager [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 1448.638245] env[62974]: DEBUG oslo.service.loopingcall [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1448.638434] env[62974]: DEBUG nova.compute.manager [-] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Deallocating network for instance {{(pid=62974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1448.638524] env[62974]: DEBUG nova.network.neutron [-] [instance: df3a9d82-1563-4960-a69a-870b3d440081] deallocate_for_instance() {{(pid=62974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1448.887095] env[62974]: DEBUG nova.compute.manager [req-f4cb70b2-fac7-4b93-80a6-8326937731aa req-bdc0bb40-df06-4f71-9519-dd58741aa879 service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Received event network-vif-deleted-dfce7e27-d5b3-43be-b3ab-52006b1587bd {{(pid=62974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}}
[ 1448.887095] env[62974]: INFO nova.compute.manager [req-f4cb70b2-fac7-4b93-80a6-8326937731aa req-bdc0bb40-df06-4f71-9519-dd58741aa879 service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Neutron deleted interface dfce7e27-d5b3-43be-b3ab-52006b1587bd; detaching it from the instance and deleting it from the info cache
[ 1448.887462] env[62974]: DEBUG nova.network.neutron [req-f4cb70b2-fac7-4b93-80a6-8326937731aa req-bdc0bb40-df06-4f71-9519-dd58741aa879 service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1449.370788] env[62974]: DEBUG nova.network.neutron [-] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Updating instance_info_cache with network_info: [] {{(pid=62974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
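The "Waiting for function ... _deallocate_network_with_retries to return" entry above comes from oslo.service's looping-call helper. A minimal sketch of that pattern, assuming the standard oslo.service loopingcall API; the function body and interval below are made up for illustration and are not Nova's retry logic:

from oslo_service import loopingcall

def _deallocate_once():
    # Call the network API here; raise LoopingCallDone to stop the loop on success.
    raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_once)
succeeded = timer.start(interval=1).wait()  # blocks until LoopingCallDone is raised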
[ 1449.389519] env[62974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2364f9fa-12bb-4cbd-8cd6-b54dbef32d36 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1449.398811] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebf5eba-b355-4c6c-b3f6-f6c4c5d05632 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1449.421690] env[62974]: DEBUG nova.compute.manager [req-f4cb70b2-fac7-4b93-80a6-8326937731aa req-bdc0bb40-df06-4f71-9519-dd58741aa879 service nova] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Detach interface failed, port_id=dfce7e27-d5b3-43be-b3ab-52006b1587bd, reason: Instance df3a9d82-1563-4960-a69a-870b3d440081 could not be found. {{(pid=62974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}}
[ 1449.873718] env[62974]: INFO nova.compute.manager [-] [instance: df3a9d82-1563-4960-a69a-870b3d440081] Took 1.24 seconds to deallocate network for instance.
[ 1450.007512] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1450.381991] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1450.382257] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1450.382490] env[62974]: DEBUG nova.objects.instance [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lazy-loading 'resources' on Instance uuid df3a9d82-1563-4960-a69a-870b3d440081 {{(pid=62974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1450.916023] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1177047-1449-4253-a122-ad6c059e798b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1450.923771] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f27daae-3707-4ad2-8eb1-d2e461ac419b {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1450.953302] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcab67b-060a-4647-9734-f21caba4116e {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1450.960341] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a382785-b095-4d6b-ada2-04340e9bf88f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1450.973261] env[62974]: DEBUG nova.compute.provider_tree [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1451.477043] env[62974]: DEBUG nova.scheduler.client.report [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1451.981787] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.599s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1452.000376] env[62974]: INFO nova.scheduler.client.report [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Deleted allocations for instance df3a9d82-1563-4960-a69a-870b3d440081
[ 1452.509635] env[62974]: DEBUG oslo_concurrency.lockutils [None req-29ae2e29-c77f-47e8-a9ff-6db040fcc119 tempest-ServersTestJSON-241726719 tempest-ServersTestJSON-241726719-project-member] Lock "df3a9d82-1563-4960-a69a-870b3d440081" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.486s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1453.008747] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1454.007993] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1454.510097] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
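The inventory payload reported above can be turned into per-resource-class schedulable capacity; Placement computes capacity as (total - reserved) * allocation_ratio. A quick check against the logged values (the helper itself is just for illustration, not part of Nova or Placement):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # (total - reserved) * allocation_ratio per resource class
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}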
[ 1454.510398] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1454.510447] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1454.510589] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1454.511501] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50d60be-baac-40ea-8b2b-e8f1b887e491 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1454.519614] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb13c2f0-595e-4d23-bd3b-d850c339049f {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1454.533296] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3dcd97-09c9-49df-854b-27f88e27ac07 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1454.539934] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837e1db0-0c12-4e3d-bf0a-ac3da2ccf6f0 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1454.569411] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180908MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=62974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1454.569566] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1454.569754] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1455.589469] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1455.589677] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1455.602463] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3132d45-9f08-4101-87e6-90d980ff64b8 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1455.610616] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb393a5-31e4-485f-8770-1cbc309b1bd6 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1455.640841] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb91eb0c-f7c4-41fc-9add-5ab81c902715 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1455.647918] env[62974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b0b287-7ea5-4d6a-9541-fbdc00b3dcb2 {{(pid=62974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1455.660788] env[62974]: DEBUG nova.compute.provider_tree [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed in ProviderTree for provider: bd3bd9ae-180c-41cf-831e-3dd3892efa18 {{(pid=62974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1456.163835] env[62974]: DEBUG nova.scheduler.client.report [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Inventory has not changed for provider bd3bd9ae-180c-41cf-831e-3dd3892efa18 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1456.669091] env[62974]: DEBUG nova.compute.resource_tracker [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1456.669477] env[62974]: DEBUG oslo_concurrency.lockutils [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.100s {{(pid=62974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1457.669307] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1457.669650] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1457.669700] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Starting heal instance info cache {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}}
[ 1457.669778] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Rebuilding the list of instances to heal {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}}
[ 1458.172319] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Didn't find any instances for network info cache update. {{(pid=62974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}}
[ 1458.172543] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1458.172715] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1458.172902] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1458.173075] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1458.173210] env[62974]: DEBUG nova.compute.manager [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}}
[ 1463.505775] env[62974]: DEBUG oslo_service.periodic_task [None req-34a52b1f-7afd-4b23-a529-1e18a75a8bdf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
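The recurring "Running periodic task ComputeManager._*" entries above are driven by oslo.service's periodic-task machinery (periodic_task.py:210 in the log paths). A minimal sketch, assuming the standard oslo.service periodic-task API; the manager class and task below are illustrative, not Nova's:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class ManagerSketch(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=10)
    def _poll_something(self, context):
        # Each decorated method is logged as "Running periodic task ..." when the
        # service's timer calls run_periodic_tasks(), as in the entries above.
        pass

# A service loop would call: ManagerSketch().run_periodic_tasks(context=None)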